# Record the R version, platform, locale, and loaded package versions used
# to render this report — a reproducibility audit trail for the results below.
sessionInfo()
## R version 3.5.1 (2018-07-02)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17134)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.1  magrittr_1.5    tools_3.5.1     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.20      stringr_1.3.1   digest_0.6.18   evaluate_0.12

User Inputs

# Unpack the report parameters into top-level variables.
# `params` is the parameter list supplied by the rmarkdown/knitr document
# (declared in the YAML header); see the str() dump below for its contents.
output.var <- params$output.var          # name of the response variable (e.g. "y3")
transform.abs <- params$transform.abs    # flag read from params — semantics defined upstream
log.pred <- params$log.pred              # TRUE -> model/predict on the log scale
eda <- params$eda                        # flag gating exploratory data analysis

# Flags selecting which hand-rolled variable-selection algorithms to run
algo.forward <- params$algo.forward
algo.backward <- params$algo.backward
algo.stepwise <- params$algo.stepwise
algo.LASSO <- params$algo.LASSO
algo.LARS <- params$algo.LARS

# Flags selecting which caret-based algorithm variants to run
algo.forward.caret <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret <- params$algo.LASSO.caret
algo.LARS.caret <- params$algo.LARS.caret

message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 14
##  $ output.var         : chr "y3"
##  $ transform.abs      : logi FALSE
##  $ log.pred           : logi FALSE
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# label.names          = name of the variable actually modeled/predicted
# alt.scale.label.name = Alternate Scale variable name
#   - if predicting on log, then alt.scale is normal scale
#   - if predicting on normal scale, then alt.scale is log scale
# Use a single if/else (the two cases are mutually exclusive) and paste0
# instead of paste(..., sep = ""). isTRUE() guards against a NA/NULL flag,
# which would make `if (log.pred == TRUE)` error.
if (isTRUE(log.pred)) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else {
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# Read the raw feature tables. features_highprec.csv holds the same data
# exported at higher numeric precision (compare x11 in the two head() dumps
# below: 1.05e-07 vs 1.050025e-07).
features <- read.csv("../../Data/features.csv")
features.highprec <- read.csv("../../Data/features_highprec.csv")
# Auto-printed comparison: all.equal() lists, per differing column, the mean
# relative difference — used here to judge how much precision was lost in
# the standard export. (Intentionally all.equal, not identical/==, since
# these are floating-point columns.)
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first six rows of the standard-precision feature table
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the first six rows of the high-precision feature table for
# side-by-side comparison with head(features) above
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set (loaded earlier) for the rest of the analysis
features = features.highprec
#str(features) 

Checking correlations to evaluate removal of redundant features

# Pairwise correlations among the numeric feature columns, rounded to 2 dp
corr.matrix = round(cor(features[sapply(features, is.numeric)]), 2)

# Zero the diagonal so self-correlation never triggers the filter, then keep
# only the variables correlated with at least one other at |r| >= threshold
threshold = 0.6
corr.matrix.tmp = corr.matrix
diag(corr.matrix.tmp) = 0
high.corr = apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix = corr.matrix.tmp[high.corr, high.corr]

# Interactive tables: the full matrix and the highly-correlated subset
DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Model features are every column of `features` except the JobName identifier
drops <- c('JobName')
all.cols = colnames(features)
feature.names = all.cols[!(all.cols %in% drops)]
#str(feature.names)

Read and Clean Labels

# Load the labels file and keep only the job identifier plus the configured
# output variable (params$output.var)
labels = read.csv("../../Data/labels.csv")
#str(labels)
labels = labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Inner join of features and labels on JobName, then drop the identifier —
# it is only needed for the join, not for modeling
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
data = data[, !(colnames(data) %in% drops)]
#str(data)

Transformations

# Optional label transformations, driven by the report parameters.
# transform.abs: undo a dB-style scale — convert label columns to linear
# amplitude (10^(x/20)) and drop extreme outliers on the output variable.
if (transform.abs == TRUE){
  data[,label.names] = 10^(data[,label.names]/20)
  # NOTE(review): this previously filtered on the hard-coded column `y3`, which
  # silently breaks for any other params$output.var. Filter on the configured
  # output variable instead; !is.na() keeps dplyr::filter's NA-dropping
  # semantics with base subsetting.
  keep = !is.na(data[[output.var]]) & data[[output.var]] < 1E7
  data = data[keep, ]
}


#str(data)
# log.pred: model on a log10 scale of the alternate-scale label, then drop
# the original alternate-scale column so it is not used as a predictor.
if (log.pred == TRUE){
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)

Remove NA Cases

# Drop every row that has at least one NA in any feature or label column
data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

# EDA: correlation of each feature with the label(s), rounded to 4 dp
if (eda == TRUE){
  corr.to.label = round(cor(dplyr::select(data, -one_of(label.names)),
                            dplyr::select_at(data, label.names)), 4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

# EDA: variance inflation factors, largest first — screens for multicollinearity
if (eda == TRUE){
  vifDF = usdm::vif(select_at(data, feature.names)) %>% arrange(desc(VIF))
  head(vifDF, 10)
}

Scatterplots

panel.hist <- function(x, ...)
{
    # Histogram panel for pairs(): draws the histogram of x scaled to fit the
    # current panel, restoring the panel's coordinate system on exit.
    usr <- par("usr")
    on.exit(par(usr))
    par(usr = c(usr[1:2], 0, 1.5))
    histo <- hist(x, plot = FALSE)
    n.breaks <- length(histo$breaks)
    bar.heights <- histo$counts / max(histo$counts)
    rect(histo$breaks[-n.breaks], 0, histo$breaks[-1], bar.heights, col = "cyan", ...)
}
# EDA: distribution of the label variable(s)
if (eda == TRUE){
  histogram(data[, label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
ind.pairs.plot <- function(data, xvars=NULL, yvar)
{
    # Scatterplot of yvar against each x variable, one base-graphics chart per x.
    # data  : data.frame holding all columns
    # xvars : x column names to plot; defaults to every column except yvar
    # yvar  : single y column name
    df <- data
    if (is.null(xvars)) {
        xvars = names(data[which(names(data)!=yvar)])       
    }   

    # seq_along() (rather than 1:length(xvars)) is safe when xvars is empty —
    # the original 1:ncharts form produced c(1, 0) and errored on zero charts
    for(i in seq_along(xvars)){    
        plot(df[,xvars[i]],df[,yvar], xlab = xvars[i], ylab = yvar)
    }
    invisible(NULL)
}

# EDA: one scatterplot of the label against every feature
if (eda) {
  ind.pairs.plot(data, feature.names, label.names)
}


# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# NOTE(review): this block runs only when eda is FALSE — presumably so the
# transformation plots appear on modeling runs rather than EDA runs; confirm
# that inversion is intended.
if (!eda) {
  # x18 may need transformations
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')

  # Replace x18 with its square root
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))

  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle the rows, then do a stratified-on-label 80/20 split (caTools)
data = data[sample(nrow(data)), ]
split = sample.split(data[, label.names], SplitRatio = 0.8)

data.train = subset(data, split)
data.test = subset(data, !split)

Common Functions

plot.diagnostics <-  function(model, train) {
  # Draw the model's diagnostic plot suite plus studentized/standardized
  # residual plots, a residual histogram with a normal overlay, and
  # leverage / Cook's-distance plots. Prints counts of influential points.
  #
  # model : fitted model (used with resid/rstandard/rstudent/predict —
  #         presumably an lm; confirm for other model classes)
  # train : data the model was fit on; used for predictions and to scale
  #         the Cook's D rule-of-thumb cutoff 4/n
  #
  # Returns the vector of Cook's distances.
  plot(model)
  
  residuals = resid(model) # Plotted above in plot(lm.out)
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs. fitted values, with a zero reference line
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs. fitted values, with +/-2 reference bands
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the normal distribution at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance, with the 4/n rule-of-thumb and the absolute cutoff of 1
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  # Train a linear-model selection method through caret with per-method
  # tuning-grid defaults and a parallel backend.
  #
  # formula       : full model formula (label ~ all features)
  # data          : training data.frame
  # method        : caret method name — 'leapForward'/'leapBackward'/'leapSeq',
  #                 'glmnet' (with subopt = 'LASSO'), or 'lars'
  # subopt        : qualifier for 'glmnet'; only 'LASSO' is handled
  # feature.names : candidate feature names (sizes the leap nvmax grid)
  # train.control : optional caret::trainControl; defaults to 10-fold CV grid search
  # tune.grid     : optional tuning grid; a per-method default is built when NULL
  # pre.proc      : optional preProcess spec; defaulted to center/scale for 'lars'
  #                 when no tune.grid is supplied (matching the original behavior)
  #
  # Returns list(model = <fitted model>, id = <best leap model id, or NULL>).

  # Classify the method once. isTRUE() keeps the glmnet test safe when subopt
  # is NULL — the original `subopt == 'LASSO'` comparison produced logical(0)
  # and crashed inside `&&`.
  is.leap  = method %in% c('leapForward', 'leapBackward', 'leapSeq')
  is.lasso = identical(method, 'glmnet') && isTRUE(subopt == 'LASSO')
  is.lars  = identical(method, 'lars')

  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }

  if(is.null(tune.grid)){
    if (is.leap){
      # Search every subset size from 1 predictor up to the full feature set
      tune.grid = data.frame(nvmax = seq_along(feature.names))
    }
    if (is.lasso){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1) # alpha = 1 selects the pure LASSO penalty
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (is.lars){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }

  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  # Use ~75% of cores, leaving the rest for other tasks. floor() + max(1, ...)
  # guards against passing a fractional or zero core count to makeCluster.
  cl <- makeCluster(max(1, floor(detectCores() * 0.75)))
  registerDoParallel(cl)
  # Guarantee cleanup even if caret::train errors out
  on.exit({
    stopCluster(cl)
    registerDoSEQ() # re-register the sequential backend for later code
  }, add = TRUE)

  set.seed(1) 
  # note that the seed has to actually be set just before this function is called;
  # setting it above does not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )

  # Residuals-vs-fitted plot, shared by every method branch
  # (this code was previously duplicated three times)
  plot.residuals = function(){
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()
    plot(residPlot)
  }

  if (is.leap){
    print(model.caret$results) # all candidate subset sizes
    print(model.caret$bestTune) # best subset size
  
    model = model.caret$finalModel
  
    plot.residuals()
    
    # Provides the coefficients of the best model
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    
    # Need to find alternate to plotting diagnostic plots
    # plot.diagnostics(model.forward,data.train)
    # plot(model.forward,labels = colnames(data.train),scale=c("bic")) ## too many variables
    return(list(model = model,id = id))
  }
  if (is.lasso || is.lars){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    plot.residuals()

    id = NULL # not needed for these methods; kept for a consistent return shape
    return(list(model = model.caret,id = id))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Changed slightly since call[[2]] was just returning "formula" without actually
# returning the value in formula.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    # Predict from a best-subsets fit: build the full design matrix from the
    # supplied formula, then multiply the columns of the size-`id` model by
    # its coefficients.
    #form <- as.formula(object$call[[2]])
    design <- model.matrix(formula, newdata) # adds intercept and expands any interaction terms
    beta <- coef(object, id = id)
    selected <- names(beta)
    design[, selected] %*% beta
}
  
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names){
  ## Evaluate a fitted model on a test set: print a summary of the predictions
  ## and the test MSE, and plot predicted vs. actual with tolerance bands.
  ##
  ## model         : fitted model object
  ## test          : test data.frame
  ## level         : confidence level for plain-model prediction intervals
  ## draw.limits   : currently unused — the bands are always drawn; kept for
  ##                 backward compatibility
  ## good, ok      : relative tolerances for the green/blue reference lines
  ## method/subopt : NULL for a plain model; otherwise the method used
  ##                 ('leapForward'/'leapBackward'/'leapSeq', 'glmnet' with
  ##                 subopt = 'LASSO', or 'lars')
  ## id, formula   : best-subset id and full formula (leap methods only)
  ## feature.names : feature column names (LASSO only)
  ## label.names   : name of the label column in `test`
  ##
  ## Returns the test MSE, invisibly (backward compatible: the previous
  ## version returned NULL invisibly).
  ##
  ## NOTE(fix): the original used independent `if` tests; with method = NULL
  ## the later `method == ...` comparisons evaluated to logical(0) and
  ## `if (logical(0))` crashed. An if/else chain avoids that.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level) 
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (identical(method, 'glmnet') && isTRUE(subopt == 'LASSO')){
    # glmnet predicts from a plain numeric matrix, not a formula/data.frame
    xtest = as.matrix(test[,feature.names]) 
    pred=as.data.frame(predict(model, xtest))
  } else if (identical(method, 'lars')){
    pred=as.data.frame(predict(model, newdata = test))
  }
    
  # Summary of predicted values (first column holds the point predictions)
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))

  # Predicted vs. actual with +/- good (green) and +/- ok (blue) bands
  plot(test[,label.names],pred[,1],xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
  invisible(test.mse)
}

Setup Formulae

# Build the full formula (label ~ every other column) and the intercept-only
# grand-mean formula from the training data's column names
n <- names(data.train)
lhs.terms <- paste(n[n %in% label.names], collapse = " + ")
rhs.terms <- paste(n[!n %in% label.names], collapse = " + ")
formula <- as.formula(paste(lhs.terms, " ~", rhs.terms))
grand.mean.formula = as.formula(paste(lhs.terms, " ~ 1"))
print(formula)
## y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
print(grand.mean.formula)
## y3 ~ 1
# Update feature.names because we may have transformed some features
# (e.g. x18 was replaced by sqrt.x18 above), so recompute from the current
# training columns
feature.names = n[!n %in% label.names]

Full & Grand Means Model

# Baseline: ordinary least squares on every available feature
model.full = lm(formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -21.502  -6.117  -1.805   4.414  56.338 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  8.825e+01  2.787e+00  31.665  < 2e-16 ***
## x1          -2.320e-01  1.907e-01  -1.217 0.223747    
## x2           1.667e-01  1.219e-01   1.368 0.171488    
## x3           1.072e-02  3.324e-02   0.323 0.747008    
## x4          -1.278e-02  2.639e-03  -4.842 1.32e-06 ***
## x5           4.903e-02  8.656e-02   0.566 0.571163    
## x6           2.305e-01  1.739e-01   1.326 0.185036    
## x7           3.312e+00  1.868e-01  17.728  < 2e-16 ***
## x8           1.244e-01  4.336e-02   2.868 0.004142 ** 
## x9           9.609e-01  9.677e-02   9.929  < 2e-16 ***
## x10          3.145e-01  9.053e-02   3.474 0.000517 ***
## x11          5.452e+07  2.167e+07   2.516 0.011905 *  
## x12         -7.120e-02  5.486e-02  -1.298 0.194424    
## x13          2.580e-02  2.194e-02   1.176 0.239619    
## x14         -2.396e-01  9.492e-02  -2.525 0.011610 *  
## x15          2.946e-02  8.993e-02   0.328 0.743203    
## x16          2.764e-01  6.202e-02   4.457 8.47e-06 ***
## x17          4.480e-01  9.511e-02   4.710 2.53e-06 ***
## x19          4.363e-02  4.808e-02   0.907 0.364199    
## x20         -1.396e-01  3.353e-01  -0.416 0.677155    
## x21          3.960e-02  1.237e-02   3.200 0.001380 ** 
## x22         -1.354e-01  1.008e-01  -1.343 0.179445    
## x23          4.060e-02  9.540e-02   0.426 0.670418    
## stat1        3.225e-02  7.251e-02   0.445 0.656466    
## stat2        8.969e-02  7.237e-02   1.239 0.215266    
## stat3        1.055e-01  7.256e-02   1.454 0.146125    
## stat4       -1.438e-01  7.278e-02  -1.976 0.048188 *  
## stat5       -4.047e-03  7.278e-02  -0.056 0.955656    
## stat6       -2.827e-02  7.240e-02  -0.390 0.696216    
## stat7       -3.069e-02  7.262e-02  -0.423 0.672639    
## stat8       -2.322e-03  7.276e-02  -0.032 0.974547    
## stat9       -6.983e-02  7.216e-02  -0.968 0.333229    
## stat10      -5.304e-02  7.274e-02  -0.729 0.465870    
## stat11      -4.061e-02  7.321e-02  -0.555 0.579179    
## stat12      -9.540e-03  7.243e-02  -0.132 0.895212    
## stat13      -1.915e-01  7.213e-02  -2.654 0.007965 ** 
## stat14      -2.844e-01  7.228e-02  -3.935 8.43e-05 ***
## stat15      -8.377e-02  7.206e-02  -1.163 0.245079    
## stat16       8.093e-02  7.220e-02   1.121 0.262387    
## stat17      -7.472e-02  7.197e-02  -1.038 0.299163    
## stat18      -6.468e-02  7.224e-02  -0.895 0.370639    
## stat19       8.778e-02  7.223e-02   1.215 0.224314    
## stat20      -5.830e-02  7.242e-02  -0.805 0.420830    
## stat21      -5.658e-02  7.316e-02  -0.773 0.439326    
## stat22      -1.109e-01  7.239e-02  -1.532 0.125598    
## stat23       1.391e-01  7.201e-02   1.932 0.053417 .  
## stat24      -1.521e-01  7.244e-02  -2.100 0.035748 *  
## stat25      -1.725e-01  7.231e-02  -2.385 0.017091 *  
## stat26      -7.795e-02  7.242e-02  -1.076 0.281788    
## stat27       3.888e-02  7.250e-02   0.536 0.591786    
## stat28       3.086e-02  7.241e-02   0.426 0.669990    
## stat29       1.092e-01  7.260e-02   1.505 0.132442    
## stat30       4.929e-02  7.285e-02   0.677 0.498704    
## stat31      -7.019e-02  7.301e-02  -0.961 0.336402    
## stat32       3.670e-02  7.260e-02   0.505 0.613263    
## stat33      -7.833e-02  7.230e-02  -1.083 0.278702    
## stat34       1.291e-02  7.260e-02   0.178 0.858906    
## stat35      -8.799e-02  7.226e-02  -1.218 0.223374    
## stat36      -1.414e-02  7.193e-02  -0.197 0.844209    
## stat37      -9.002e-02  7.296e-02  -1.234 0.217342    
## stat38       1.460e-01  7.230e-02   2.020 0.043442 *  
## stat39      -1.404e-01  7.218e-02  -1.945 0.051828 .  
## stat40       1.373e-02  7.281e-02   0.189 0.850437    
## stat41      -1.385e-01  7.236e-02  -1.914 0.055637 .  
## stat42      -6.215e-02  7.204e-02  -0.863 0.388346    
## stat43      -4.051e-02  7.213e-02  -0.562 0.574430    
## stat44       4.967e-02  7.236e-02   0.686 0.492436    
## stat45      -1.099e-01  7.269e-02  -1.512 0.130637    
## stat46       7.452e-02  7.264e-02   1.026 0.304931    
## stat47       1.071e-01  7.295e-02   1.468 0.142235    
## stat48       1.106e-01  7.271e-02   1.521 0.128201    
## stat49       6.335e-02  7.203e-02   0.880 0.379165    
## stat50       6.341e-02  7.142e-02   0.888 0.374616    
## stat51       1.439e-01  7.228e-02   1.991 0.046509 *  
## stat52       2.014e-03  7.243e-02   0.028 0.977813    
## stat53      -6.237e-02  7.316e-02  -0.852 0.394014    
## stat54      -9.075e-02  7.279e-02  -1.247 0.212565    
## stat55       8.632e-02  7.175e-02   1.203 0.229001    
## stat56      -4.174e-02  7.294e-02  -0.572 0.567199    
## stat57      -5.133e-02  7.182e-02  -0.715 0.474808    
## stat58       1.883e-02  7.151e-02   0.263 0.792276    
## stat59       8.842e-02  7.247e-02   1.220 0.222512    
## stat60       2.230e-01  7.282e-02   3.062 0.002209 ** 
## stat61      -8.237e-02  7.262e-02  -1.134 0.256711    
## stat62      -8.644e-02  7.222e-02  -1.197 0.231400    
## stat63       7.546e-02  7.271e-02   1.038 0.299433    
## stat64      -7.450e-02  7.171e-02  -1.039 0.298892    
## stat65      -1.213e-01  7.235e-02  -1.676 0.093751 .  
## stat66       1.186e-01  7.290e-02   1.627 0.103734    
## stat67      -5.376e-02  7.276e-02  -0.739 0.460056    
## stat68      -2.551e-02  7.308e-02  -0.349 0.727020    
## stat69       4.040e-03  7.249e-02   0.056 0.955559    
## stat70       3.379e-02  7.192e-02   0.470 0.638484    
## stat71       1.855e-02  7.212e-02   0.257 0.797057    
## stat72      -2.520e-02  7.274e-02  -0.346 0.729036    
## stat73       5.660e-02  7.317e-02   0.774 0.439199    
## stat74      -7.429e-03  7.265e-02  -0.102 0.918557    
## stat75      -7.046e-02  7.272e-02  -0.969 0.332650    
## stat76       1.625e-02  7.255e-02   0.224 0.822774    
## stat77       7.897e-03  7.239e-02   0.109 0.913139    
## stat78      -2.606e-02  7.263e-02  -0.359 0.719750    
## stat79      -2.538e-02  7.217e-02  -0.352 0.725103    
## stat80       2.142e-02  7.304e-02   0.293 0.769358    
## stat81       1.396e-01  7.293e-02   1.914 0.055677 .  
## stat82       1.540e-02  7.238e-02   0.213 0.831534    
## stat83       2.034e-02  7.231e-02   0.281 0.778529    
## stat84      -3.312e-02  7.228e-02  -0.458 0.646828    
## stat85      -2.204e-02  7.258e-02  -0.304 0.761409    
## stat86      -1.417e-04  7.281e-02  -0.002 0.998447    
## stat87      -4.484e-02  7.283e-02  -0.616 0.538168    
## stat88      -8.369e-02  7.217e-02  -1.160 0.246270    
## stat89      -9.484e-02  7.189e-02  -1.319 0.187135    
## stat90      -4.933e-02  7.255e-02  -0.680 0.496555    
## stat91      -1.033e-01  7.175e-02  -1.440 0.149947    
## stat92      -9.999e-02  7.233e-02  -1.382 0.166893    
## stat93      -8.119e-02  7.315e-02  -1.110 0.267101    
## stat94      -6.798e-02  7.270e-02  -0.935 0.349796    
## stat95      -3.186e-02  7.226e-02  -0.441 0.659325    
## stat96      -1.128e-01  7.217e-02  -1.563 0.118146    
## stat97      -1.971e-03  7.204e-02  -0.027 0.978172    
## stat98       9.547e-01  7.108e-02  13.432  < 2e-16 ***
## stat99       7.922e-02  7.260e-02   1.091 0.275277    
## stat100      1.608e-01  7.234e-02   2.224 0.026219 *  
## stat101     -4.656e-02  7.290e-02  -0.639 0.523037    
## stat102      3.043e-03  7.251e-02   0.042 0.966531    
## stat103     -9.097e-02  7.340e-02  -1.239 0.215242    
## stat104     -9.652e-02  7.247e-02  -1.332 0.182935    
## stat105      1.360e-01  7.206e-02   1.887 0.059254 .  
## stat106     -1.019e-01  7.248e-02  -1.406 0.159630    
## stat107     -8.523e-03  7.223e-02  -0.118 0.906070    
## stat108     -3.265e-02  7.271e-02  -0.449 0.653440    
## stat109      5.340e-02  7.238e-02   0.738 0.460707    
## stat110     -9.451e-01  7.215e-02 -13.100  < 2e-16 ***
## stat111     -4.018e-03  7.213e-02  -0.056 0.955584    
## stat112      3.551e-02  7.327e-02   0.485 0.627926    
## stat113     -7.316e-03  7.305e-02  -0.100 0.920230    
## stat114      3.321e-02  7.230e-02   0.459 0.646042    
## stat115      5.756e-02  7.217e-02   0.798 0.425153    
## stat116      2.900e-02  7.306e-02   0.397 0.691392    
## stat117      2.466e-02  7.293e-02   0.338 0.735317    
## stat118     -7.623e-02  7.211e-02  -1.057 0.290513    
## stat119      2.174e-02  7.221e-02   0.301 0.763326    
## stat120      4.794e-02  7.208e-02   0.665 0.505989    
## stat121      6.168e-02  7.295e-02   0.845 0.397880    
## stat122     -5.506e-02  7.230e-02  -0.762 0.446360    
## stat123     -1.940e-02  7.335e-02  -0.264 0.791440    
## stat124     -1.505e-04  7.252e-02  -0.002 0.998345    
## stat125      9.547e-02  7.254e-02   1.316 0.188210    
## stat126      2.905e-02  7.195e-02   0.404 0.686370    
## stat127      2.455e-02  7.238e-02   0.339 0.734493    
## stat128     -6.271e-02  7.237e-02  -0.867 0.386197    
## stat129     -1.232e-02  7.222e-02  -0.171 0.864545    
## stat130      1.118e-01  7.255e-02   1.541 0.123273    
## stat131     -6.583e-02  7.262e-02  -0.906 0.364745    
## stat132     -4.165e-02  7.170e-02  -0.581 0.561320    
## stat133     -3.139e-02  7.272e-02  -0.432 0.666030    
## stat134     -9.479e-02  7.191e-02  -1.318 0.187466    
## stat135     -4.386e-02  7.253e-02  -0.605 0.545449    
## stat136     -4.759e-02  7.295e-02  -0.652 0.514160    
## stat137     -2.344e-03  7.196e-02  -0.033 0.974013    
## stat138      3.392e-02  7.222e-02   0.470 0.638568    
## stat139      3.415e-02  7.259e-02   0.470 0.638082    
## stat140     -3.790e-02  7.227e-02  -0.524 0.600050    
## stat141      4.155e-02  7.185e-02   0.578 0.563147    
## stat142     -1.571e-02  7.334e-02  -0.214 0.830407    
## stat143      4.738e-02  7.235e-02   0.655 0.512613    
## stat144      1.631e-01  7.175e-02   2.274 0.023030 *  
## stat145      5.432e-02  7.343e-02   0.740 0.459485    
## stat146     -1.127e-01  7.306e-02  -1.543 0.122845    
## stat147     -7.466e-02  7.328e-02  -1.019 0.308273    
## stat148     -2.489e-02  7.142e-02  -0.349 0.727471    
## stat149     -2.118e-01  7.320e-02  -2.894 0.003821 ** 
## stat150     -1.225e-02  7.312e-02  -0.168 0.866955    
## stat151     -1.178e-01  7.368e-02  -1.599 0.109822    
## stat152     -5.257e-02  7.200e-02  -0.730 0.465337    
## stat153      3.992e-02  7.354e-02   0.543 0.587255    
## stat154      9.087e-02  7.303e-02   1.244 0.213473    
## stat155      2.563e-02  7.243e-02   0.354 0.723402    
## stat156      1.110e-01  7.313e-02   1.517 0.129250    
## stat157     -2.562e-02  7.235e-02  -0.354 0.723261    
## stat158     -1.937e-02  7.378e-02  -0.263 0.792897    
## stat159     -1.684e-02  7.223e-02  -0.233 0.815629    
## stat160     -2.547e-02  7.266e-02  -0.351 0.725951    
## stat161      1.132e-02  7.296e-02   0.155 0.876709    
## stat162     -6.291e-03  7.206e-02  -0.087 0.930436    
## stat163      3.706e-02  7.296e-02   0.508 0.611532    
## stat164      1.873e-02  7.279e-02   0.257 0.796924    
## stat165      3.490e-02  7.184e-02   0.486 0.627124    
## stat166     -8.238e-02  7.171e-02  -1.149 0.250734    
## stat167     -7.347e-02  7.218e-02  -1.018 0.308769    
## stat168     -9.786e-03  7.237e-02  -0.135 0.892435    
## stat169      5.296e-02  7.284e-02   0.727 0.467232    
## stat170     -7.460e-03  7.263e-02  -0.103 0.918194    
## stat171     -8.802e-03  7.334e-02  -0.120 0.904478    
## stat172      4.281e-02  7.189e-02   0.595 0.551577    
## stat173     -6.928e-02  7.295e-02  -0.950 0.342309    
## stat174     -4.757e-03  7.209e-02  -0.066 0.947385    
## stat175     -9.221e-02  7.285e-02  -1.266 0.205601    
## stat176      2.940e-02  7.219e-02   0.407 0.683871    
## stat177      1.812e-02  7.271e-02   0.249 0.803166    
## stat178      1.170e-02  7.358e-02   0.159 0.873632    
## stat179      2.680e-03  7.238e-02   0.037 0.970462    
## stat180     -1.081e-02  7.152e-02  -0.151 0.879889    
## stat181      7.393e-03  7.291e-02   0.101 0.919240    
## stat182     -1.477e-02  7.292e-02  -0.203 0.839488    
## stat183      6.013e-02  7.220e-02   0.833 0.404991    
## stat184     -7.518e-02  7.321e-02  -1.027 0.304517    
## stat185     -3.010e-02  7.202e-02  -0.418 0.676037    
## stat186     -7.498e-02  7.300e-02  -1.027 0.304392    
## stat187     -6.789e-02  7.233e-02  -0.939 0.347925    
## stat188     -2.282e-02  7.194e-02  -0.317 0.751106    
## stat189     -2.529e-02  7.239e-02  -0.349 0.726869    
## stat190     -3.252e-02  7.230e-02  -0.450 0.652849    
## stat191     -1.376e-01  7.242e-02  -1.900 0.057515 .  
## stat192      6.202e-02  7.317e-02   0.848 0.396645    
## stat193     -9.605e-03  7.320e-02  -0.131 0.895602    
## stat194      2.088e-02  7.218e-02   0.289 0.772352    
## stat195      1.419e-01  7.208e-02   1.969 0.049052 *  
## stat196     -3.608e-02  7.328e-02  -0.492 0.622511    
## stat197     -1.957e-02  7.164e-02  -0.273 0.784734    
## stat198     -1.161e-01  7.256e-02  -1.600 0.109709    
## stat199      1.039e-01  7.195e-02   1.444 0.148845    
## stat200     -1.090e-01  7.182e-02  -1.518 0.129078    
## stat201     -2.455e-02  7.249e-02  -0.339 0.734847    
## stat202     -9.994e-02  7.333e-02  -1.363 0.172961    
## stat203      9.070e-03  7.202e-02   0.126 0.899777    
## stat204     -1.423e-01  7.233e-02  -1.968 0.049114 *  
## stat205     -1.376e-01  7.214e-02  -1.907 0.056589 .  
## stat206     -6.280e-02  7.248e-02  -0.866 0.386283    
## stat207      7.971e-02  7.248e-02   1.100 0.271506    
## stat208     -9.804e-03  7.265e-02  -0.135 0.892656    
## stat209     -4.080e-02  7.222e-02  -0.565 0.572131    
## stat210     -5.939e-02  7.262e-02  -0.818 0.413467    
## stat211     -5.413e-02  7.241e-02  -0.748 0.454754    
## stat212      2.444e-02  7.240e-02   0.338 0.735664    
## stat213     -1.069e-02  7.307e-02  -0.146 0.883671    
## stat214     -1.517e-01  7.216e-02  -2.102 0.035565 *  
## stat215     -9.782e-02  7.282e-02  -1.343 0.179246    
## stat216     -3.596e-02  7.262e-02  -0.495 0.620439    
## stat217      1.334e-01  7.258e-02   1.838 0.066079 .  
## sqrt.x18     7.672e+00  2.747e-01  27.929  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 9.527 on 5761 degrees of freedom
## Multiple R-squared:  0.2532, Adjusted R-squared:  0.2221 
## F-statistic: 8.138 on 240 and 5761 DF,  p-value: < 2.2e-16
# Diagnostics for the full model. plot.diagnostics is a project helper; per the
# subsetting below (cd.full > 4/nrow) it returns the per-observation Cook's
# distance vector (and prints the counts shown in the output that follows).
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 290"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking the model fit after removing high-influence points

# Flag observations whose Cook's distance exceeds the 4/n rule-of-thumb cutoff,
# drop them, and refit the full model on the filtered training set.
high.cd = names(cd.full[cd.full > 4 / nrow(data.train)])
# NOTE: %in% binds tighter than !, so the original `!(rownames(data.train)) %in% high.cd`
# already negated the membership test — but the parens misleadingly suggested `!`
# applies to the character vector first (which would error). Group explicitly.
data.train2 = data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 = lm(formula, data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -18.275  -5.058  -1.015   4.618  21.568 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  8.627e+01  2.207e+00  39.083  < 2e-16 ***
## x1          -1.808e-01  1.513e-01  -1.195 0.232269    
## x2           1.376e-01  9.646e-02   1.426 0.153805    
## x3           2.865e-03  2.626e-02   0.109 0.913139    
## x4          -1.452e-02  2.092e-03  -6.941 4.36e-12 ***
## x5           8.107e-02  6.853e-02   1.183 0.236886    
## x6           1.003e-01  1.376e-01   0.729 0.465809    
## x7           3.397e+00  1.479e-01  22.967  < 2e-16 ***
## x8           1.401e-01  3.440e-02   4.071 4.75e-05 ***
## x9           9.279e-01  7.639e-02  12.147  < 2e-16 ***
## x10          4.264e-01  7.188e-02   5.932 3.17e-09 ***
## x11          5.693e+07  1.716e+07   3.318 0.000911 ***
## x12         -5.983e-02  4.336e-02  -1.380 0.167748    
## x13          3.932e-02  1.742e-02   2.258 0.024010 *  
## x14         -8.512e-02  7.510e-02  -1.133 0.257098    
## x15          2.541e-02  7.126e-02   0.357 0.721395    
## x16          2.484e-01  4.917e-02   5.052 4.51e-07 ***
## x17          4.312e-01  7.531e-02   5.726 1.08e-08 ***
## x19          4.512e-02  3.812e-02   1.184 0.236584    
## x20         -8.789e-02  2.659e-01  -0.330 0.741058    
## x21          3.850e-02  9.797e-03   3.930 8.59e-05 ***
## x22         -1.651e-01  7.972e-02  -2.071 0.038440 *  
## x23          8.793e-02  7.566e-02   1.162 0.245216    
## stat1       -5.557e-03  5.738e-02  -0.097 0.922853    
## stat2        1.152e-01  5.727e-02   2.012 0.044250 *  
## stat3        1.232e-01  5.745e-02   2.145 0.031974 *  
## stat4       -1.410e-01  5.780e-02  -2.440 0.014707 *  
## stat5        2.594e-03  5.773e-02   0.045 0.964162    
## stat6       -5.634e-02  5.732e-02  -0.983 0.325711    
## stat7       -8.444e-02  5.733e-02  -1.473 0.140825    
## stat8       -3.210e-02  5.759e-02  -0.557 0.577331    
## stat9       -4.814e-02  5.720e-02  -0.842 0.400096    
## stat10      -2.967e-02  5.752e-02  -0.516 0.605956    
## stat11      -9.349e-02  5.799e-02  -1.612 0.107002    
## stat12      -1.870e-02  5.732e-02  -0.326 0.744285    
## stat13      -1.835e-01  5.706e-02  -3.216 0.001306 ** 
## stat14      -3.333e-01  5.716e-02  -5.831 5.82e-09 ***
## stat15      -1.303e-01  5.710e-02  -2.281 0.022572 *  
## stat16       7.715e-03  5.707e-02   0.135 0.892474    
## stat17      -3.450e-02  5.707e-02  -0.605 0.545479    
## stat18      -4.885e-02  5.711e-02  -0.855 0.392427    
## stat19       2.238e-02  5.733e-02   0.390 0.696301    
## stat20       7.403e-04  5.738e-02   0.013 0.989707    
## stat21      -5.914e-02  5.796e-02  -1.020 0.307595    
## stat22      -5.690e-02  5.721e-02  -0.994 0.320025    
## stat23       1.934e-01  5.711e-02   3.387 0.000711 ***
## stat24      -1.190e-01  5.738e-02  -2.075 0.038062 *  
## stat25      -1.657e-01  5.720e-02  -2.896 0.003792 ** 
## stat26      -9.415e-02  5.743e-02  -1.639 0.101175    
## stat27       3.320e-02  5.753e-02   0.577 0.563933    
## stat28      -2.306e-03  5.735e-02  -0.040 0.967928    
## stat29       5.450e-02  5.747e-02   0.948 0.342970    
## stat30       2.852e-02  5.751e-02   0.496 0.620012    
## stat31      -2.857e-02  5.775e-02  -0.495 0.620799    
## stat32       9.192e-03  5.748e-02   0.160 0.872968    
## stat33      -7.215e-02  5.726e-02  -1.260 0.207739    
## stat34       6.166e-02  5.741e-02   1.074 0.282792    
## stat35      -1.004e-01  5.724e-02  -1.754 0.079403 .  
## stat36      -3.296e-02  5.711e-02  -0.577 0.563914    
## stat37      -4.599e-02  5.780e-02  -0.796 0.426233    
## stat38       1.493e-01  5.704e-02   2.617 0.008896 ** 
## stat39      -1.509e-01  5.715e-02  -2.641 0.008286 ** 
## stat40      -2.150e-02  5.769e-02  -0.373 0.709364    
## stat41      -1.685e-01  5.721e-02  -2.945 0.003248 ** 
## stat42      -5.188e-02  5.708e-02  -0.909 0.363425    
## stat43      -7.581e-02  5.704e-02  -1.329 0.183828    
## stat44       2.881e-02  5.738e-02   0.502 0.615649    
## stat45      -1.036e-01  5.760e-02  -1.798 0.072188 .  
## stat46       7.042e-02  5.749e-02   1.225 0.220607    
## stat47       9.264e-02  5.775e-02   1.604 0.108725    
## stat48       9.483e-02  5.738e-02   1.653 0.098467 .  
## stat49       1.870e-02  5.703e-02   0.328 0.742957    
## stat50       9.248e-02  5.661e-02   1.633 0.102424    
## stat51       9.265e-02  5.727e-02   1.618 0.105743    
## stat52       5.429e-02  5.746e-02   0.945 0.344844    
## stat53      -8.941e-02  5.795e-02  -1.543 0.122908    
## stat54      -1.230e-01  5.779e-02  -2.128 0.033376 *  
## stat55       9.500e-02  5.694e-02   1.668 0.095325 .  
## stat56       8.579e-03  5.771e-02   0.149 0.881822    
## stat57      -4.550e-02  5.700e-02  -0.798 0.424773    
## stat58      -1.330e-02  5.657e-02  -0.235 0.814165    
## stat59       8.249e-02  5.740e-02   1.437 0.150740    
## stat60       2.279e-01  5.770e-02   3.950 7.91e-05 ***
## stat61      -1.024e-01  5.739e-02  -1.783 0.074566 .  
## stat62      -8.550e-02  5.709e-02  -1.498 0.134260    
## stat63       5.465e-02  5.769e-02   0.947 0.343588    
## stat64       1.903e-02  5.678e-02   0.335 0.737509    
## stat65      -7.236e-02  5.723e-02  -1.264 0.206194    
## stat66       5.825e-02  5.773e-02   1.009 0.312985    
## stat67       2.151e-02  5.761e-02   0.373 0.708863    
## stat68      -3.366e-02  5.787e-02  -0.582 0.560868    
## stat69      -3.735e-02  5.749e-02  -0.650 0.515897    
## stat70       2.902e-02  5.696e-02   0.509 0.610433    
## stat71       4.596e-02  5.724e-02   0.803 0.421978    
## stat72      -4.214e-02  5.757e-02  -0.732 0.464223    
## stat73       6.503e-02  5.798e-02   1.122 0.262063    
## stat74       2.143e-02  5.759e-02   0.372 0.709816    
## stat75       1.930e-03  5.754e-02   0.034 0.973240    
## stat76       1.520e-02  5.737e-02   0.265 0.791120    
## stat77       7.852e-02  5.735e-02   1.369 0.171004    
## stat78      -7.465e-02  5.739e-02  -1.301 0.193408    
## stat79       3.755e-02  5.708e-02   0.658 0.510649    
## stat80       5.460e-02  5.786e-02   0.944 0.345409    
## stat81       7.481e-02  5.775e-02   1.295 0.195236    
## stat82       1.219e-02  5.727e-02   0.213 0.831417    
## stat83      -1.067e-02  5.717e-02  -0.187 0.851965    
## stat84      -5.365e-02  5.722e-02  -0.937 0.348560    
## stat85      -1.322e-01  5.750e-02  -2.298 0.021571 *  
## stat86       1.507e-02  5.767e-02   0.261 0.793921    
## stat87      -4.472e-02  5.769e-02  -0.775 0.438277    
## stat88      -4.215e-02  5.725e-02  -0.736 0.461664    
## stat89      -5.042e-02  5.707e-02  -0.883 0.377022    
## stat90      -3.843e-02  5.746e-02  -0.669 0.503592    
## stat91      -1.144e-01  5.672e-02  -2.017 0.043743 *  
## stat92      -6.060e-02  5.723e-02  -1.059 0.289731    
## stat93      -2.720e-02  5.814e-02  -0.468 0.639924    
## stat94      -9.964e-03  5.747e-02  -0.173 0.862350    
## stat95       4.884e-02  5.732e-02   0.852 0.394184    
## stat96      -1.285e-01  5.719e-02  -2.247 0.024683 *  
## stat97      -3.886e-03  5.704e-02  -0.068 0.945678    
## stat98       8.773e-01  5.629e-02  15.584  < 2e-16 ***
## stat99       9.186e-02  5.749e-02   1.598 0.110143    
## stat100      1.657e-01  5.735e-02   2.889 0.003884 ** 
## stat101      2.713e-02  5.777e-02   0.470 0.638690    
## stat102     -4.829e-03  5.744e-02  -0.084 0.933009    
## stat103     -1.021e-01  5.799e-02  -1.761 0.078222 .  
## stat104     -3.410e-02  5.743e-02  -0.594 0.552720    
## stat105      1.010e-01  5.713e-02   1.768 0.077185 .  
## stat106     -1.248e-01  5.730e-02  -2.178 0.029451 *  
## stat107      1.552e-02  5.719e-02   0.271 0.786133    
## stat108      1.579e-02  5.771e-02   0.274 0.784413    
## stat109      1.877e-02  5.738e-02   0.327 0.743593    
## stat110     -9.104e-01  5.689e-02 -16.003  < 2e-16 ***
## stat111      4.295e-02  5.700e-02   0.754 0.451119    
## stat112      2.393e-02  5.815e-02   0.412 0.680720    
## stat113      4.438e-02  5.777e-02   0.768 0.442406    
## stat114      5.291e-02  5.735e-02   0.923 0.356260    
## stat115      8.263e-02  5.712e-02   1.447 0.148039    
## stat116      1.154e-02  5.783e-02   0.200 0.841848    
## stat117      2.816e-02  5.762e-02   0.489 0.625089    
## stat118     -2.938e-03  5.706e-02  -0.051 0.958944    
## stat119      9.468e-02  5.707e-02   1.659 0.097147 .  
## stat120     -3.730e-02  5.706e-02  -0.654 0.513304    
## stat121      3.962e-02  5.770e-02   0.687 0.492325    
## stat122     -6.359e-02  5.731e-02  -1.110 0.267233    
## stat123      3.142e-02  5.799e-02   0.542 0.587974    
## stat124     -4.830e-02  5.737e-02  -0.842 0.399888    
## stat125      2.816e-02  5.747e-02   0.490 0.624103    
## stat126      1.290e-02  5.698e-02   0.226 0.820865    
## stat127     -3.125e-02  5.731e-02  -0.545 0.585555    
## stat128     -1.396e-01  5.721e-02  -2.440 0.014721 *  
## stat129     -1.417e-02  5.714e-02  -0.248 0.804114    
## stat130      1.154e-01  5.739e-02   2.010 0.044454 *  
## stat131     -5.255e-02  5.746e-02  -0.915 0.360455    
## stat132     -8.327e-02  5.682e-02  -1.465 0.142850    
## stat133      2.781e-02  5.771e-02   0.482 0.629832    
## stat134     -3.907e-02  5.693e-02  -0.686 0.492541    
## stat135     -5.797e-02  5.746e-02  -1.009 0.313125    
## stat136     -6.661e-02  5.782e-02  -1.152 0.249365    
## stat137      6.565e-02  5.686e-02   1.155 0.248329    
## stat138     -1.572e-02  5.708e-02  -0.275 0.782951    
## stat139      2.911e-02  5.750e-02   0.506 0.612715    
## stat140     -1.775e-02  5.708e-02  -0.311 0.755764    
## stat141      6.151e-02  5.687e-02   1.082 0.279516    
## stat142      2.939e-02  5.810e-02   0.506 0.612940    
## stat143      1.486e-02  5.723e-02   0.260 0.795126    
## stat144      1.481e-01  5.680e-02   2.607 0.009167 ** 
## stat145      5.317e-02  5.832e-02   0.912 0.361909    
## stat146     -1.289e-01  5.786e-02  -2.228 0.025942 *  
## stat147     -5.193e-02  5.813e-02  -0.893 0.371700    
## stat148     -2.531e-02  5.664e-02  -0.447 0.654998    
## stat149     -1.977e-01  5.807e-02  -3.404 0.000668 ***
## stat150     -2.637e-02  5.800e-02  -0.455 0.649356    
## stat151      7.341e-03  5.848e-02   0.126 0.900106    
## stat152     -2.164e-02  5.694e-02  -0.380 0.703868    
## stat153      4.985e-02  5.813e-02   0.858 0.391170    
## stat154      8.882e-02  5.786e-02   1.535 0.124849    
## stat155      6.200e-02  5.744e-02   1.079 0.280515    
## stat156      7.208e-02  5.782e-02   1.247 0.212571    
## stat157     -2.530e-02  5.728e-02  -0.442 0.658687    
## stat158      5.211e-02  5.840e-02   0.892 0.372257    
## stat159      1.792e-02  5.725e-02   0.313 0.754245    
## stat160      3.344e-05  5.764e-02   0.001 0.999537    
## stat161      1.597e-02  5.776e-02   0.277 0.782156    
## stat162     -1.096e-02  5.697e-02  -0.192 0.847435    
## stat163      3.388e-02  5.791e-02   0.585 0.558579    
## stat164     -3.465e-03  5.770e-02  -0.060 0.952112    
## stat165      4.378e-02  5.693e-02   0.769 0.441876    
## stat166     -4.773e-02  5.670e-02  -0.842 0.399934    
## stat167     -1.066e-01  5.718e-02  -1.865 0.062219 .  
## stat168     -2.203e-02  5.720e-02  -0.385 0.700100    
## stat169      3.950e-02  5.777e-02   0.684 0.494191    
## stat170      1.655e-02  5.749e-02   0.288 0.773499    
## stat171     -6.160e-02  5.809e-02  -1.060 0.288972    
## stat172      1.207e-01  5.683e-02   2.124 0.033712 *  
## stat173     -3.574e-02  5.781e-02  -0.618 0.536423    
## stat174      3.174e-02  5.706e-02   0.556 0.578108    
## stat175     -7.213e-02  5.755e-02  -1.253 0.210093    
## stat176     -4.151e-02  5.719e-02  -0.726 0.467970    
## stat177     -1.082e-01  5.761e-02  -1.879 0.060306 .  
## stat178      1.797e-02  5.830e-02   0.308 0.757906    
## stat179      3.178e-02  5.729e-02   0.555 0.579083    
## stat180      3.506e-02  5.674e-02   0.618 0.536667    
## stat181      3.230e-02  5.761e-02   0.561 0.575045    
## stat182      5.589e-02  5.779e-02   0.967 0.333519    
## stat183      4.603e-02  5.722e-02   0.804 0.421254    
## stat184     -7.994e-03  5.791e-02  -0.138 0.890205    
## stat185      1.261e-02  5.715e-02   0.221 0.825358    
## stat186      1.683e-02  5.783e-02   0.291 0.771062    
## stat187     -4.205e-02  5.711e-02  -0.736 0.461587    
## stat188     -1.243e-02  5.708e-02  -0.218 0.827639    
## stat189     -7.784e-02  5.742e-02  -1.355 0.175335    
## stat190     -8.823e-02  5.723e-02  -1.542 0.123204    
## stat191     -9.211e-02  5.730e-02  -1.607 0.108009    
## stat192      1.026e-02  5.796e-02   0.177 0.859524    
## stat193      5.146e-02  5.802e-02   0.887 0.375108    
## stat194     -4.512e-02  5.723e-02  -0.788 0.430486    
## stat195      2.419e-02  5.718e-02   0.423 0.672307    
## stat196     -2.180e-02  5.796e-02  -0.376 0.706896    
## stat197     -4.581e-02  5.672e-02  -0.808 0.419272    
## stat198     -8.382e-02  5.739e-02  -1.460 0.144220    
## stat199      5.015e-02  5.697e-02   0.880 0.378713    
## stat200     -1.498e-02  5.696e-02  -0.263 0.792615    
## stat201     -2.924e-02  5.760e-02  -0.508 0.611696    
## stat202     -4.623e-02  5.818e-02  -0.795 0.426843    
## stat203      3.230e-02  5.698e-02   0.567 0.570863    
## stat204     -6.816e-02  5.728e-02  -1.190 0.234098    
## stat205     -2.033e-02  5.696e-02  -0.357 0.721245    
## stat206     -1.062e-01  5.737e-02  -1.852 0.064136 .  
## stat207      9.038e-02  5.736e-02   1.576 0.115175    
## stat208      3.438e-02  5.758e-02   0.597 0.550506    
## stat209      1.609e-03  5.710e-02   0.028 0.977516    
## stat210     -6.548e-02  5.748e-02  -1.139 0.254675    
## stat211     -3.862e-02  5.730e-02  -0.674 0.500268    
## stat212      4.936e-02  5.731e-02   0.861 0.389135    
## stat213      2.916e-02  5.772e-02   0.505 0.613450    
## stat214     -1.051e-01  5.720e-02  -1.838 0.066121 .  
## stat215     -8.115e-02  5.775e-02  -1.405 0.160041    
## stat216     -4.982e-02  5.733e-02  -0.869 0.384922    
## stat217      7.288e-02  5.754e-02   1.266 0.205398    
## sqrt.x18     7.547e+00  2.169e-01  34.796  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 7.351 on 5471 degrees of freedom
## Multiple R-squared:  0.3562, Adjusted R-squared:  0.328 
## F-statistic: 12.61 on 240 and 5471 DF,  p-value: < 2.2e-16
# Re-run diagnostics on the refit model / filtered data; the printed counts
# below show 315 points still exceed the 4/n Cook's D cutoff, none exceed 1.
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 315"
## [1] "Number of data points that have Cook's D > 1: 0"
# much more normal residuals than before.
# See if you can check the distribution (boxplots) of the high leverage points and the other points
# High Leverage Plot MMORO ###
# Label each observation High (Cook's D above the 4/n cutoff on the full train
# set) or Normal, then compare the two groups' Cook's distances on a log scale.
cd.compare = data.frame(cd = cd.full) %>%
  mutate(type = ifelse(cd > 4 / nrow(data.train), 'High', 'Normal'))
ggplot(data = cd.compare, aes(x = type, y = cd)) +
  geom_boxplot(fill = 'light blue') +
  scale_y_continuous(trans = 'log', name = "Cook's Distance (log)") +
  theme_light() +
  # fixed: title previously contained a double space ("Normal  Points")
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Baseline models used as the lower scope for forward/stepwise selection below.
# grand.mean.formula is defined elsewhere — presumably an intercept-only
# (grand mean) formula; confirm against its definition earlier in the file.
model.null = lm(grand.mean.formula, data.train)
# summary(model.null)
# plot.diagnostics(model.null, data.train)
# Same baseline refit on the data with high-influence points removed.
model.null2 = lm(grand.mean.formula, data.train2)
# summary(model.null2)
# plot.diagnostics(model.null2, data.train2)

Variable Selection

References — basic stepwise selection: http://www.stat.columbia.edu/~martin/W2024/R10.pdf; stepwise selection with cross-validation and other metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward selection on the full training set, gated by the report parameter.
# isTRUE() is safer than `== TRUE`: it handles NA or non-logical values
# without erroring or silently entering the branch.
if (isTRUE(algo.forward)) {
  t1 = Sys.time()
  
  model.forward = step(model.null,
                       scope = list(lower = model.null, upper = model.full),
                       direction = "forward", trace = 0)
  print(summary(model.forward))
  #saveRDS(model.forward, file = "model_forward.rds")
  
  t2 = Sys.time()
  # format() keeps the difftime's units (secs/mins); pasting the bare
  # difference prints an ambiguous unitless number. paste0 over paste(sep="").
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))
  
  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selection model on the held-out test set.
# isTRUE() instead of `== TRUE` — robust to NA / non-logical param values.
if (isTRUE(algo.forward)) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward selection on the filtered training set (high-influence rows removed).
# isTRUE() instead of `== TRUE` — robust to NA / non-logical param values.
if (isTRUE(algo.forward)) {
  t1 = Sys.time()
  
  model.forward2 = step(model.null2,
                        scope = list(lower = model.null2, upper = model.full2),
                        direction = "forward", trace = 0)
  print(summary(model.forward2))
  # fixed stale copy-paste: this chunk fits model.forward2, not model.forward
  #saveRDS(model.forward2, file = "model_forward2.rds")
  
  t2 = Sys.time()
  # format() keeps the difftime's units; bare t2-t1 prints a unitless number.
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))
  
  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward-selection model on the held-out test set.
# isTRUE() instead of `== TRUE` — robust to NA / non-logical param values.
if (isTRUE(algo.forward)) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Cross-validated forward selection via caret's leapForward, wrapped by the
# project helper train.caret.glmselect (returns the fitted model plus an id).
# isTRUE() instead of `== TRUE` — robust to NA / non-logical param values.
if (isTRUE(algo.forward.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned = train.caret.glmselect(formula = formula
                                   , data = data.train
                                   , method = "leapForward"
                                   , feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 16 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.205848 0.1070230 7.793298 0.4837937 0.02121144 0.2343155
## 2       2  9.978578 0.1456242 7.594919 0.4865059 0.01542500 0.2210630
## 3       3  9.869319 0.1645156 7.472217 0.4757917 0.01700325 0.2032220
## 4       4  9.705345 0.1918124 7.255723 0.4773872 0.01219753 0.1920870
## 5       5  9.623121 0.2056678 7.191041 0.5001426 0.01530975 0.2093101
## 6       6  9.615614 0.2067906 7.190418 0.4962592 0.01340840 0.1969631
## 7       7  9.599094 0.2094876 7.178668 0.4953228 0.01391436 0.2006617
## 8       8  9.581185 0.2124654 7.171670 0.4983385 0.01423833 0.2054035
## 9       9  9.573978 0.2136032 7.160477 0.4946051 0.01182847 0.1967355
## 10     10  9.566457 0.2148079 7.156153 0.4890367 0.01057451 0.1936818
## 11     11  9.567690 0.2147292 7.159764 0.4979287 0.01271238 0.2087616
## 12     12  9.570734 0.2142485 7.161341 0.4950334 0.01178461 0.2061083
## 13     13  9.569555 0.2144817 7.158617 0.4954577 0.01207316 0.2005056
## 14     14  9.569202 0.2145821 7.160108 0.4886390 0.01088688 0.1991866
## 15     15  9.568774 0.2146047 7.157827 0.4818420 0.01068948 0.1909961
## 16     16  9.565261 0.2152305 7.160380 0.4895184 0.01216199 0.1976081
## 17     17  9.567334 0.2149625 7.164737 0.4913768 0.01360563 0.2009011
## 18     18  9.572223 0.2142536 7.165375 0.4935820 0.01405113 0.2001792
## 19     19  9.567220 0.2151634 7.160656 0.4986940 0.01458215 0.2066717
## 20     20  9.571368 0.2145047 7.164174 0.4933182 0.01386590 0.1989496
## 21     21  9.567416 0.2152074 7.159803 0.4941473 0.01404452 0.2018575
## 22     22  9.571406 0.2145969 7.160732 0.4957266 0.01458413 0.2060193
## 23     23  9.575425 0.2139530 7.161713 0.4970140 0.01432900 0.2056816
## 24     24  9.584611 0.2125175 7.168373 0.4956858 0.01421770 0.2002674
## 25     25  9.581901 0.2129427 7.164419 0.4953870 0.01386644 0.1996005
## 26     26  9.583743 0.2127369 7.164793 0.4979161 0.01452431 0.1991570
## 27     27  9.584930 0.2125648 7.166746 0.4946890 0.01388254 0.1933937
## 28     28  9.592303 0.2114755 7.176218 0.4955237 0.01422260 0.1955142
## 29     29  9.594619 0.2111496 7.179263 0.4920210 0.01387720 0.1935187
## 30     30  9.594531 0.2111348 7.181164 0.4886627 0.01374159 0.1944880
## 31     31  9.592214 0.2115019 7.182320 0.4913554 0.01399652 0.1977549
## 32     32  9.596338 0.2108660 7.185186 0.4911173 0.01369816 0.1935786
## 33     33  9.600163 0.2102227 7.185724 0.4825230 0.01295648 0.1864999
## 34     34  9.606077 0.2093563 7.191567 0.4785715 0.01308893 0.1858295
## 35     35  9.612094 0.2084842 7.196839 0.4810674 0.01325224 0.1876467
## 36     36  9.613614 0.2082737 7.198500 0.4810632 0.01301752 0.1870024
## 37     37  9.619007 0.2073933 7.204556 0.4829169 0.01295590 0.1874158
## 38     38  9.620767 0.2071515 7.202086 0.4817776 0.01275396 0.1845823
## 39     39  9.624027 0.2066601 7.202826 0.4837002 0.01330190 0.1858999
## 40     40  9.627022 0.2061970 7.207216 0.4863355 0.01303468 0.1883853
## 41     41  9.632032 0.2054544 7.212336 0.4889318 0.01266462 0.1930146
## 42     42  9.630941 0.2056589 7.212233 0.4931403 0.01329216 0.1964734
## 43     43  9.633113 0.2053342 7.213706 0.4938240 0.01358561 0.2015098
## 44     44  9.636211 0.2049000 7.216841 0.4919803 0.01379320 0.1993277
## 45     45  9.636226 0.2049692 7.214656 0.4913235 0.01389670 0.1957690
## 46     46  9.635844 0.2050912 7.215415 0.4933396 0.01406488 0.1980321
## 47     47  9.633604 0.2054201 7.214172 0.4906287 0.01319585 0.1959893
## 48     48  9.637044 0.2049228 7.219410 0.4913375 0.01316567 0.1964193
## 49     49  9.638142 0.2047848 7.221109 0.4911534 0.01357937 0.1967339
## 50     50  9.639763 0.2045427 7.226550 0.4889001 0.01275524 0.1941188
## 51     51  9.642162 0.2041790 7.231294 0.4897592 0.01275260 0.1914550
## 52     52  9.641711 0.2042536 7.229422 0.4893983 0.01268386 0.1929917
## 53     53  9.646684 0.2035512 7.233248 0.4878977 0.01261278 0.1911499
## 54     54  9.648677 0.2033129 7.235829 0.4883934 0.01266993 0.1927885
## 55     55  9.649797 0.2032089 7.236751 0.4890812 0.01229942 0.1922051
## 56     56  9.651188 0.2030487 7.236308 0.4899266 0.01193685 0.1968934
## 57     57  9.650155 0.2032218 7.235055 0.4903257 0.01211339 0.1973277
## 58     58  9.651303 0.2030737 7.236970 0.4906843 0.01232721 0.1963263
## 59     59  9.654182 0.2026424 7.240821 0.4903826 0.01262102 0.1945804
## 60     60  9.654436 0.2025888 7.239809 0.4903490 0.01308315 0.1950564
## 61     61  9.655319 0.2024802 7.241536 0.4892310 0.01286149 0.1925803
## 62     62  9.654714 0.2025961 7.241085 0.4848763 0.01242700 0.1899459
## 63     63  9.656846 0.2023799 7.242004 0.4865223 0.01273801 0.1908949
## 64     64  9.655101 0.2026612 7.240344 0.4871345 0.01221387 0.1904239
## 65     65  9.656155 0.2025409 7.241273 0.4902918 0.01263194 0.1955224
## 66     66  9.660475 0.2019115 7.244422 0.4882990 0.01266130 0.1922495
## 67     67  9.656662 0.2024549 7.244949 0.4887937 0.01270361 0.1939563
## 68     68  9.658058 0.2022737 7.246564 0.4883509 0.01328513 0.1921142
## 69     69  9.658922 0.2021702 7.248110 0.4905271 0.01372876 0.1955497
## 70     70  9.659542 0.2021113 7.248301 0.4886902 0.01370104 0.1963318
## 71     71  9.658347 0.2023341 7.244360 0.4876363 0.01367430 0.1975858
## 72     72  9.658854 0.2022483 7.243764 0.4886420 0.01369566 0.1974729
## 73     73  9.661497 0.2018938 7.244700 0.4888370 0.01326187 0.1966830
## 74     74  9.656529 0.2026026 7.239153 0.4878159 0.01275733 0.1953722
## 75     75  9.658702 0.2022754 7.240874 0.4851514 0.01277234 0.1918310
## 76     76  9.658091 0.2024022 7.241545 0.4828952 0.01258510 0.1915073
## 77     77  9.655939 0.2027067 7.240276 0.4838051 0.01258347 0.1928427
## 78     78  9.658048 0.2024226 7.244550 0.4850144 0.01280357 0.1943126
## 79     79  9.657404 0.2025575 7.244784 0.4850124 0.01267238 0.1951341
## 80     80  9.658427 0.2024254 7.244542 0.4836787 0.01247573 0.1923515
## 81     81  9.659485 0.2023244 7.246252 0.4828629 0.01259742 0.1928354
## 82     82  9.660974 0.2021109 7.247485 0.4818481 0.01230595 0.1902835
## 83     83  9.658704 0.2025003 7.247328 0.4819665 0.01251089 0.1905502
## 84     84  9.657530 0.2026847 7.246283 0.4795291 0.01229216 0.1889392
## 85     85  9.659587 0.2023891 7.248562 0.4810647 0.01256484 0.1897930
## 86     86  9.664186 0.2017050 7.251962 0.4828956 0.01264515 0.1909245
## 87     87  9.662354 0.2019939 7.249820 0.4852275 0.01263981 0.1927229
## 88     88  9.663933 0.2017990 7.249499 0.4863098 0.01247501 0.1910270
## 89     89  9.662939 0.2019802 7.247378 0.4860883 0.01251219 0.1895289
## 90     90  9.665734 0.2015829 7.248549 0.4835262 0.01263055 0.1871058
## 91     91  9.665991 0.2015795 7.248873 0.4840365 0.01266325 0.1875797
## 92     92  9.667505 0.2013788 7.248042 0.4849326 0.01283964 0.1887931
## 93     93  9.665950 0.2016564 7.247423 0.4820112 0.01261444 0.1857881
## 94     94  9.666648 0.2015315 7.247609 0.4796665 0.01236112 0.1836397
## 95     95  9.666133 0.2016188 7.248163 0.4795595 0.01252299 0.1828430
## 96     96  9.668508 0.2012795 7.251699 0.4809960 0.01250488 0.1853130
## 97     97  9.667582 0.2014083 7.251545 0.4842224 0.01299776 0.1889252
## 98     98  9.670011 0.2010712 7.253864 0.4832461 0.01317296 0.1888060
## 99     99  9.670709 0.2009780 7.253636 0.4816196 0.01319313 0.1874263
## 100   100  9.670032 0.2010958 7.255351 0.4828631 0.01338294 0.1879891
## 101   101  9.673368 0.2006481 7.258961 0.4838703 0.01359388 0.1888615
## 102   102  9.676628 0.2002329 7.262479 0.4833262 0.01358822 0.1876588
## 103   103  9.678442 0.1999516 7.264204 0.4830861 0.01347677 0.1887335
## 104   104  9.678857 0.1998833 7.267099 0.4806289 0.01321732 0.1862020
## 105   105  9.678470 0.1999348 7.266876 0.4769057 0.01264238 0.1824511
## 106   106  9.677522 0.2000656 7.266291 0.4771336 0.01277846 0.1808448
## 107   107  9.679411 0.1997908 7.267970 0.4781202 0.01294369 0.1801825
## 108   108  9.680187 0.1996561 7.268273 0.4764454 0.01276116 0.1778914
## 109   109  9.682271 0.1993993 7.269039 0.4767379 0.01301340 0.1775527
## 110   110  9.681587 0.1995298 7.267626 0.4751738 0.01273248 0.1747409
## 111   111  9.682310 0.1994146 7.268503 0.4769211 0.01251937 0.1772595
## 112   112  9.684076 0.1991975 7.269887 0.4754739 0.01266979 0.1779162
## 113   113  9.683787 0.1992225 7.269759 0.4743589 0.01250701 0.1774515
## 114   114  9.685420 0.1990001 7.271673 0.4741482 0.01259184 0.1779413
## 115   115  9.684459 0.1991474 7.270257 0.4752595 0.01245610 0.1783746
## 116   116  9.683876 0.1992460 7.270317 0.4752343 0.01246474 0.1779957
## 117   117  9.684572 0.1991471 7.270459 0.4732485 0.01261324 0.1777548
## 118   118  9.686065 0.1989184 7.271384 0.4727663 0.01269869 0.1763239
## 119   119  9.687825 0.1986458 7.272408 0.4749034 0.01302683 0.1784585
## 120   120  9.689031 0.1984676 7.273898 0.4764223 0.01290701 0.1802110
## 121   121  9.690871 0.1982483 7.273887 0.4787129 0.01316292 0.1824292
## 122   122  9.690315 0.1983093 7.273085 0.4776322 0.01301405 0.1821575
## 123   123  9.690915 0.1982285 7.273856 0.4776575 0.01331210 0.1829433
## 124   124  9.692273 0.1980551 7.275009 0.4773972 0.01334720 0.1841497
## 125   125  9.691021 0.1982659 7.274540 0.4774782 0.01356103 0.1844282
## 126   126  9.693829 0.1978817 7.276602 0.4777448 0.01349207 0.1854113
## 127   127  9.696093 0.1975454 7.278360 0.4779849 0.01378271 0.1861958
## 128   128  9.695391 0.1976704 7.277285 0.4775272 0.01362427 0.1851510
## 129   129  9.695221 0.1976701 7.278227 0.4771897 0.01368229 0.1858387
## 130   130  9.698052 0.1972581 7.280975 0.4758814 0.01380211 0.1867169
## 131   131  9.697932 0.1973080 7.280105 0.4743183 0.01359515 0.1844420
## 132   132  9.697573 0.1973759 7.278926 0.4728649 0.01342714 0.1831860
## 133   133  9.697401 0.1973846 7.278906 0.4743068 0.01342933 0.1839856
## 134   134  9.696910 0.1974778 7.278286 0.4742292 0.01338273 0.1844546
## 135   135  9.696959 0.1974661 7.277495 0.4732000 0.01311714 0.1825098
## 136   136  9.696925 0.1974640 7.278216 0.4724719 0.01304968 0.1810694
## 137   137  9.696888 0.1974505 7.278334 0.4705115 0.01298777 0.1795606
## 138   138  9.698302 0.1972522 7.279494 0.4695786 0.01270430 0.1787285
## 139   139  9.697894 0.1973415 7.278851 0.4684540 0.01247315 0.1792546
## 140   140  9.698647 0.1972284 7.278525 0.4709878 0.01265352 0.1810533
## 141   141  9.699028 0.1971678 7.279616 0.4699891 0.01258795 0.1782861
## 142   142  9.697721 0.1973756 7.278144 0.4709476 0.01278080 0.1809148
## 143   143  9.697813 0.1973763 7.279461 0.4701423 0.01295181 0.1813189
## 144   144  9.696320 0.1975989 7.278016 0.4717822 0.01293533 0.1828000
## 145   145  9.697164 0.1974823 7.279735 0.4724538 0.01291581 0.1839452
## 146   146  9.696757 0.1975319 7.279894 0.4727043 0.01301269 0.1839774
## 147   147  9.697522 0.1974233 7.280359 0.4743347 0.01311919 0.1839354
## 148   148  9.697810 0.1973641 7.280714 0.4752357 0.01311869 0.1835338
## 149   149  9.698719 0.1972338 7.280051 0.4754688 0.01315388 0.1836875
## 150   150  9.699335 0.1971567 7.280477 0.4742080 0.01306974 0.1812704
## 151   151  9.699759 0.1971078 7.280959 0.4746626 0.01308819 0.1809339
## 152   152  9.699998 0.1970823 7.280026 0.4743606 0.01313730 0.1801692
## 153   153  9.701752 0.1968328 7.283116 0.4740070 0.01317276 0.1804074
## 154   154  9.702899 0.1966583 7.284324 0.4733495 0.01291630 0.1793375
## 155   155  9.703793 0.1965502 7.283976 0.4741710 0.01301890 0.1812781
## 156   156  9.703775 0.1965460 7.285096 0.4747774 0.01302316 0.1822584
## 157   157  9.703450 0.1965923 7.285173 0.4748563 0.01290292 0.1819265
## 158   158  9.704328 0.1964937 7.286487 0.4765217 0.01291713 0.1843893
## 159   159  9.703783 0.1965649 7.286298 0.4770380 0.01280225 0.1851357
## 160   160  9.704398 0.1964542 7.287424 0.4760957 0.01265505 0.1841008
## 161   161  9.706109 0.1962057 7.288332 0.4752314 0.01254779 0.1821907
## 162   162  9.705268 0.1963548 7.288612 0.4759430 0.01260309 0.1825988
## 163   163  9.704030 0.1965368 7.287755 0.4763301 0.01290908 0.1833317
## 164   164  9.704904 0.1963883 7.287910 0.4763954 0.01299455 0.1832794
## 165   165  9.704869 0.1964117 7.288087 0.4755544 0.01299757 0.1821376
## 166   166  9.705688 0.1962994 7.289155 0.4741269 0.01281288 0.1807262
## 167   167  9.706337 0.1962036 7.289309 0.4741046 0.01282971 0.1811031
## 168   168  9.707204 0.1960788 7.290026 0.4730819 0.01273528 0.1797107
## 169   169  9.706935 0.1961201 7.289650 0.4732593 0.01277624 0.1793110
## 170   170  9.708322 0.1959470 7.290944 0.4732781 0.01294239 0.1796435
## 171   171  9.708910 0.1958678 7.291502 0.4738102 0.01300867 0.1804346
## 172   172  9.708427 0.1959376 7.291686 0.4727434 0.01287661 0.1793377
## 173   173  9.708569 0.1959221 7.291863 0.4719962 0.01271900 0.1788254
## 174   174  9.708397 0.1959588 7.292908 0.4726606 0.01264450 0.1804165
## 175   175  9.709438 0.1958127 7.294103 0.4715689 0.01265406 0.1796816
## 176   176  9.709590 0.1957976 7.293703 0.4714751 0.01261296 0.1798166
## 177   177  9.709657 0.1957916 7.293624 0.4711835 0.01253645 0.1792484
## 178   178  9.709536 0.1958346 7.293937 0.4721767 0.01251396 0.1799807
## 179   179  9.709915 0.1957833 7.294399 0.4719072 0.01234469 0.1799463
## 180   180  9.709841 0.1957975 7.294793 0.4729591 0.01247928 0.1806108
## 181   181  9.711130 0.1956171 7.295859 0.4728488 0.01249754 0.1808961
## 182   182  9.711023 0.1956262 7.294984 0.4726369 0.01241838 0.1802296
## 183   183  9.711936 0.1954942 7.295778 0.4715881 0.01228976 0.1787359
## 184   184  9.711634 0.1955393 7.294931 0.4712136 0.01223704 0.1784038
## 185   185  9.711150 0.1956215 7.294378 0.4715668 0.01231755 0.1780308
## 186   186  9.711510 0.1955579 7.294923 0.4707938 0.01235016 0.1779897
## 187   187  9.711915 0.1954933 7.295757 0.4702462 0.01239363 0.1769186
## 188   188  9.711401 0.1955625 7.295951 0.4688170 0.01227004 0.1763871
## 189   189  9.710433 0.1956891 7.295189 0.4687537 0.01225170 0.1772955
## 190   190  9.710355 0.1956976 7.295660 0.4679022 0.01222636 0.1763439
## 191   191  9.710399 0.1956847 7.295994 0.4675537 0.01221692 0.1766712
## 192   192  9.710990 0.1955954 7.296361 0.4679568 0.01223436 0.1773079
## 193   193  9.710467 0.1956765 7.295535 0.4681224 0.01227607 0.1769849
## 194   194  9.710846 0.1956195 7.296352 0.4672094 0.01214289 0.1765839
## 195   195  9.710998 0.1955911 7.296785 0.4679055 0.01217535 0.1769962
## 196   196  9.711544 0.1955069 7.297111 0.4674341 0.01209792 0.1768215
## 197   197  9.711157 0.1955632 7.297203 0.4679034 0.01213014 0.1774002
## 198   198  9.711459 0.1955178 7.297382 0.4676873 0.01212118 0.1780176
## 199   199  9.711763 0.1954646 7.297599 0.4679643 0.01212524 0.1783861
## 200   200  9.712089 0.1954114 7.297899 0.4680368 0.01209371 0.1783917
## 201   201  9.712238 0.1953807 7.298128 0.4677261 0.01196483 0.1782515
## 202   202  9.711818 0.1954365 7.297972 0.4677041 0.01190928 0.1783670
## 203   203  9.712314 0.1953670 7.298174 0.4673987 0.01188081 0.1785224
## 204   204  9.712486 0.1953394 7.298365 0.4675337 0.01185637 0.1785650
## 205   205  9.712728 0.1953120 7.297903 0.4672802 0.01184190 0.1783839
## 206   206  9.712803 0.1953018 7.297587 0.4676906 0.01185774 0.1786131
## 207   207  9.712969 0.1952781 7.297248 0.4676472 0.01190249 0.1789977
## 208   208  9.712667 0.1953237 7.296395 0.4675527 0.01188236 0.1789837
## 209   209  9.712865 0.1952924 7.296892 0.4674923 0.01190615 0.1790308
## 210   210  9.713115 0.1952646 7.297339 0.4672453 0.01197762 0.1788344
## 211   211  9.713154 0.1952639 7.297791 0.4681307 0.01201334 0.1790310
## 212   212  9.713887 0.1951644 7.298022 0.4675123 0.01197150 0.1789742
## 213   213  9.714192 0.1951190 7.298356 0.4672243 0.01199485 0.1787955
## 214   214  9.713905 0.1951595 7.298138 0.4673039 0.01203417 0.1786011
## 215   215  9.714339 0.1950940 7.298358 0.4674120 0.01202189 0.1787510
## 216   216  9.714256 0.1951055 7.298302 0.4678542 0.01204842 0.1789006
## 217   217  9.714555 0.1950657 7.298322 0.4680558 0.01208133 0.1793217
## 218   218  9.714872 0.1950193 7.298600 0.4680072 0.01207305 0.1790553
## 219   219  9.715020 0.1949969 7.298801 0.4680128 0.01202056 0.1790669
## 220   220  9.715131 0.1949851 7.298787 0.4680137 0.01203149 0.1789404
## 221   221  9.714983 0.1950076 7.298457 0.4680670 0.01203679 0.1790190
## 222   222  9.715141 0.1949809 7.298526 0.4681280 0.01203892 0.1789291
## 223   223  9.715082 0.1949882 7.298287 0.4682542 0.01201889 0.1791110
## 224   224  9.714937 0.1950070 7.298181 0.4682870 0.01203512 0.1789567
## 225   225  9.714871 0.1950169 7.298085 0.4683098 0.01205612 0.1789740
## 226   226  9.714810 0.1950258 7.298019 0.4683111 0.01207395 0.1789807
## 227   227  9.714748 0.1950348 7.297978 0.4683425 0.01206339 0.1788580
## 228   228  9.714570 0.1950627 7.297876 0.4682231 0.01203648 0.1788234
## 229   229  9.714722 0.1950428 7.297807 0.4681169 0.01203121 0.1787872
## 230   230  9.714694 0.1950496 7.297892 0.4682335 0.01205692 0.1788896
## 231   231  9.714536 0.1950721 7.297934 0.4683308 0.01207297 0.1788853
## 232   232  9.714408 0.1950926 7.297743 0.4683593 0.01207440 0.1787876
## 233   233  9.714291 0.1951097 7.297615 0.4683927 0.01207864 0.1787479
## 234   234  9.714310 0.1951048 7.297683 0.4683408 0.01206610 0.1786733
## 235   235  9.714420 0.1950895 7.297781 0.4682275 0.01204521 0.1785377
## 236   236  9.714438 0.1950871 7.297812 0.4682926 0.01205669 0.1786077
## 237   237  9.714535 0.1950727 7.297864 0.4683203 0.01206595 0.1786126
## 238   238  9.714524 0.1950741 7.297823 0.4682732 0.01206230 0.1785109
## 239   239  9.714518 0.1950747 7.297805 0.4682435 0.01206008 0.1784736
## 240   240  9.714503 0.1950768 7.297804 0.4682653 0.01206096 0.1784811
##    nvmax
## 16    16
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.915934e+01 -1.285514e-02  3.239585e+00  1.274240e-01  9.675302e-01 
##           x10           x11           x14           x16           x17 
##  3.156069e-01  5.572301e+07 -2.476429e-01  2.785612e-01  4.455166e-01 
##           x21        stat14        stat60        stat98       stat110 
##  3.868375e-02 -2.780175e-01  1.943018e-01  9.511989e-01 -9.339451e-01 
##       stat149      sqrt.x18 
## -2.077902e-01  7.634949e+00

Test

# Evaluate the CV-trained forward-selection model on the held-out test set.
# Guarded by the algo.forward.caret report parameter so this algorithm can
# be switched off entirely from params.
# algo.forward.caret is already logical, so compare directly rather than
# `== TRUE` (redundant, and yields NA instead of FALSE if the flag is NA).
if (algo.forward.caret) {
  # test.model() is a project helper defined earlier in this file (not shown
  # in this chunk); per the printed output below it summarises the predicted
  # values and reports the test MSE for the given method label.
  test.model(model.forward, data.test,
             method = "leapForward", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.5   121.8   125.5   125.3   128.9   139.2 
## [1] "leapForward  Test MSE: 89.7272576325162"

Forward Selection with Cross-Validation (using the filtered training set)

Train

# Re-train forward selection with cross-validation, this time on the
# filtered training set data.train2. Overwrites model.forward and id with
# the refit results, matching the pattern of the earlier training chunk.
# Use the logical flag directly instead of `== TRUE`, and `<-` for
# assignment per R convention.
if (algo.forward.caret) {
  set.seed(1)  # fix the RNG so the CV fold assignment is reproducible
  # train.caret.glmselect() is a project helper defined earlier in this
  # file (not shown here); it appears to wrap caret model training and
  # return a list holding the fitted model and an id — TODO confirm
  # against its definition.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "leapForward",
                                    feature.names = feature.names)
  model.forward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 25 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD      MAESD
## 1       1 8.270268 0.1500471 6.662698 0.1634449 0.02514052 0.11664558
## 2       2 7.968058 0.2109679 6.442396 0.1268344 0.02841881 0.10963931
## 3       3 7.845842 0.2351572 6.312228 0.1026749 0.02303243 0.08405260
## 4       4 7.665810 0.2697308 6.108489 0.1030712 0.02687427 0.08085558
## 5       5 7.567646 0.2882837 6.030670 0.1017884 0.02394369 0.06889467
## 6       6 7.533046 0.2946383 6.009967 0.1156649 0.02350019 0.07647690
## 7       7 7.535902 0.2941827 6.019534 0.1327010 0.02479979 0.08794715
## 8       8 7.508355 0.2992123 6.004663 0.1307841 0.02582231 0.07862328
## 9       9 7.494092 0.3018055 5.993913 0.1243981 0.02715968 0.06523736
## 10     10 7.462905 0.3076898 5.979216 0.1372392 0.02736091 0.07700172
## 11     11 7.458523 0.3084401 5.980278 0.1231068 0.02589088 0.07232327
## 12     12 7.449626 0.3101415 5.974128 0.1254919 0.02653904 0.07458857
## 13     13 7.454874 0.3092337 5.979360 0.1242300 0.02825630 0.07645168
## 14     14 7.453214 0.3095297 5.976030 0.1223252 0.02822013 0.07732297
## 15     15 7.449280 0.3102724 5.971021 0.1151265 0.02790063 0.07012068
## 16     16 7.444276 0.3112004 5.970120 0.1154449 0.02691800 0.07071957
## 17     17 7.440095 0.3119942 5.967821 0.1169938 0.02652318 0.06798628
## 18     18 7.440130 0.3120168 5.965993 0.1183647 0.02690668 0.06101065
## 19     19 7.441290 0.3117618 5.967720 0.1168226 0.02674273 0.06442633
## 20     20 7.441318 0.3117632 5.965754 0.1133204 0.02650599 0.06939826
## 21     21 7.440622 0.3118520 5.966512 0.1103750 0.02631120 0.06815638
## 22     22 7.440998 0.3117458 5.969405 0.1191910 0.02662217 0.07874878
## 23     23 7.435117 0.3128166 5.966475 0.1207472 0.02666011 0.08155604
## 24     24 7.432376 0.3133242 5.965317 0.1254714 0.02589745 0.08350975
## 25     25 7.427174 0.3142952 5.964074 0.1294530 0.02629751 0.08569100
## 26     26 7.429442 0.3139101 5.966334 0.1344782 0.02593492 0.08786604
## 27     27 7.427748 0.3142256 5.965396 0.1313313 0.02542731 0.09020651
## 28     28 7.429214 0.3139957 5.962745 0.1334063 0.02596286 0.09184781
## 29     29 7.430217 0.3138036 5.961525 0.1325573 0.02536088 0.09063551
## 30     30 7.440097 0.3121188 5.968205 0.1298426 0.02542237 0.08890297
## 31     31 7.444771 0.3112715 5.977173 0.1272735 0.02532592 0.09180902
## 32     32 7.444878 0.3112641 5.979971 0.1224227 0.02455986 0.09012671
## 33     33 7.445885 0.3110726 5.979526 0.1236085 0.02401712 0.08980373
## 34     34 7.445626 0.3111787 5.978867 0.1253440 0.02489009 0.09395457
## 35     35 7.441395 0.3119534 5.974747 0.1252453 0.02560298 0.09260965
## 36     36 7.440648 0.3120748 5.973807 0.1193399 0.02529007 0.08732982
## 37     37 7.442962 0.3116909 5.975630 0.1216648 0.02562119 0.09095096
## 38     38 7.442412 0.3117821 5.973954 0.1218490 0.02602849 0.09121098
## 39     39 7.442598 0.3117782 5.972167 0.1256833 0.02614503 0.09300098
## 40     40 7.442805 0.3117393 5.970514 0.1228850 0.02563324 0.09046133
## 41     41 7.445802 0.3112128 5.973407 0.1233882 0.02567731 0.08913011
## 42     42 7.449408 0.3105592 5.972936 0.1236915 0.02507956 0.09143774
## 43     43 7.449528 0.3106066 5.971538 0.1230836 0.02516070 0.08765994
## 44     44 7.453206 0.3099597 5.974107 0.1218569 0.02510287 0.08575292
## 45     45 7.453484 0.3099543 5.974780 0.1175544 0.02480624 0.08334188
## 46     46 7.455944 0.3095406 5.976069 0.1152637 0.02464170 0.08321687
## 47     47 7.459377 0.3089597 5.979902 0.1173873 0.02453286 0.08594782
## 48     48 7.461796 0.3085288 5.982447 0.1184955 0.02457746 0.08663813
## 49     49 7.463048 0.3083639 5.985049 0.1181291 0.02438431 0.08742086
## 50     50 7.459353 0.3090088 5.982509 0.1138181 0.02428897 0.08488453
## 51     51 7.462043 0.3085774 5.985365 0.1140168 0.02355532 0.08634531
## 52     52 7.467122 0.3076748 5.990979 0.1157259 0.02325294 0.08632610
## 53     53 7.465251 0.3080268 5.990840 0.1131516 0.02296230 0.08356098
## 54     54 7.465957 0.3079242 5.991513 0.1128910 0.02287662 0.08050085
## 55     55 7.468312 0.3075051 5.995625 0.1099708 0.02284868 0.08148916
## 56     56 7.466986 0.3078223 5.994806 0.1086112 0.02325644 0.07999971
## 57     57 7.469957 0.3073400 5.997396 0.1092637 0.02306459 0.08070797
## 58     58 7.471621 0.3070810 5.999736 0.1074300 0.02222463 0.07838039
## 59     59 7.470825 0.3072585 5.999289 0.1057350 0.02193570 0.07622383
## 60     60 7.473588 0.3068190 6.002796 0.1069777 0.02209823 0.07536617
## 61     61 7.476660 0.3063028 6.005636 0.1082740 0.02216860 0.07834810
## 62     62 7.478868 0.3060146 6.008861 0.1077693 0.02261928 0.07708483
## 63     63 7.479903 0.3058375 6.010008 0.1109536 0.02312981 0.07853411
## 64     64 7.478866 0.3060280 6.008061 0.1119872 0.02284084 0.07982082
## 65     65 7.478129 0.3061734 6.007900 0.1126481 0.02285274 0.07715136
## 66     66 7.479692 0.3059370 6.008461 0.1116280 0.02254632 0.07681134
## 67     67 7.480111 0.3058822 6.009286 0.1109875 0.02266821 0.07567262
## 68     68 7.482207 0.3055123 6.008609 0.1091504 0.02240214 0.07714594
## 69     69 7.486912 0.3046667 6.011446 0.1099665 0.02254978 0.07885999
## 70     70 7.488661 0.3043204 6.013690 0.1101729 0.02300409 0.07762739
## 71     71 7.490095 0.3040953 6.012778 0.1069016 0.02253847 0.07718172
## 72     72 7.487522 0.3045653 6.010362 0.1085314 0.02258771 0.07692798
## 73     73 7.491067 0.3039543 6.011479 0.1102704 0.02257665 0.07627820
## 74     74 7.489593 0.3042320 6.008483 0.1105368 0.02239445 0.07760376
## 75     75 7.489335 0.3042884 6.007267 0.1089574 0.02176076 0.07771407
## 76     76 7.489192 0.3043032 6.007889 0.1080697 0.02175223 0.07743033
## 77     77 7.492471 0.3037168 6.011622 0.1109915 0.02218919 0.07784873
## 78     78 7.491123 0.3039630 6.009826 0.1108366 0.02182508 0.07696699
## 79     79 7.493623 0.3035433 6.012968 0.1133333 0.02191099 0.07965836
## 80     80 7.496128 0.3031225 6.016288 0.1151013 0.02207166 0.08176575
## 81     81 7.497192 0.3029380 6.016782 0.1166381 0.02242902 0.08091844
## 82     82 7.498704 0.3026781 6.018075 0.1172161 0.02259895 0.08069463
## 83     83 7.497677 0.3028554 6.016341 0.1161906 0.02241198 0.07935873
## 84     84 7.498873 0.3026410 6.017504 0.1139553 0.02267429 0.07627491
## 85     85 7.499630 0.3024967 6.018971 0.1133166 0.02221965 0.07589524
## 86     86 7.499632 0.3025001 6.019992 0.1126004 0.02193522 0.07558983
## 87     87 7.498399 0.3027628 6.018779 0.1136247 0.02231431 0.07415636
## 88     88 7.498050 0.3028383 6.018258 0.1138690 0.02286849 0.07634811
## 89     89 7.500039 0.3025096 6.020762 0.1121790 0.02257205 0.07466242
## 90     90 7.500577 0.3024012 6.020266 0.1117078 0.02290882 0.07364084
## 91     91 7.503348 0.3019564 6.022355 0.1108911 0.02308335 0.07379244
## 92     92 7.505298 0.3016400 6.024415 0.1125407 0.02312746 0.07310374
## 93     93 7.506069 0.3015029 6.025474 0.1123178 0.02327143 0.07337420
## 94     94 7.506691 0.3013748 6.026434 0.1112739 0.02325794 0.07181309
## 95     95 7.508718 0.3010343 6.027137 0.1138424 0.02367179 0.07293358
## 96     96 7.508815 0.3010371 6.027991 0.1131569 0.02332011 0.07413567
## 97     97 7.507330 0.3012789 6.025938 0.1134037 0.02350063 0.07533998
## 98     98 7.507389 0.3013046 6.025220 0.1154150 0.02381778 0.07658415
## 99     99 7.507372 0.3013426 6.024486 0.1149714 0.02386084 0.07637061
## 100   100 7.510772 0.3007759 6.026165 0.1156640 0.02419904 0.07460874
## 101   101 7.513989 0.3002482 6.029011 0.1141307 0.02377557 0.07142174
## 102   102 7.514069 0.3002620 6.029178 0.1136962 0.02382378 0.07041645
## 103   103 7.513946 0.3003022 6.029506 0.1120439 0.02342939 0.07034174
## 104   104 7.512351 0.3005781 6.029149 0.1115112 0.02293943 0.06831959
## 105   105 7.511595 0.3007127 6.029099 0.1118315 0.02332256 0.06850534
## 106   106 7.510564 0.3008803 6.028673 0.1099502 0.02313356 0.06833307
## 107   107 7.511653 0.3006873 6.030064 0.1110148 0.02316931 0.06987855
## 108   108 7.512708 0.3005078 6.032113 0.1112512 0.02296502 0.07040914
## 109   109 7.512616 0.3005389 6.032861 0.1112510 0.02291141 0.07102883
## 110   110 7.511458 0.3007619 6.031451 0.1114071 0.02272162 0.07095806
## 111   111 7.512373 0.3006261 6.030977 0.1115426 0.02304710 0.06995038
## 112   112 7.513550 0.3004189 6.032129 0.1091202 0.02266398 0.06817279
## 113   113 7.512971 0.3005303 6.030360 0.1066508 0.02237106 0.06614822
## 114   114 7.514045 0.3003552 6.030793 0.1038330 0.02203353 0.06486954
## 115   115 7.514966 0.3002040 6.030393 0.1074759 0.02265177 0.06609847
## 116   116 7.513226 0.3005122 6.029271 0.1057840 0.02236629 0.06501921
## 117   117 7.513954 0.3003961 6.029629 0.1070904 0.02237677 0.06566215
## 118   118 7.514228 0.3003537 6.029062 0.1048589 0.02228375 0.06501220
## 119   119 7.514534 0.3003191 6.028660 0.1052240 0.02218524 0.06542715
## 120   120 7.514119 0.3004105 6.028813 0.1040377 0.02199159 0.06340077
## 121   121 7.513177 0.3005822 6.027766 0.1030436 0.02171990 0.06352734
## 122   122 7.514933 0.3002861 6.029130 0.1023427 0.02149532 0.06199668
## 123   123 7.514612 0.3003394 6.027733 0.1015061 0.02175213 0.06053374
## 124   124 7.515326 0.3002263 6.027241 0.1019193 0.02160247 0.06175342
## 125   125 7.514628 0.3003542 6.027844 0.1010022 0.02165998 0.06105032
## 126   126 7.513457 0.3005475 6.028666 0.1018181 0.02164524 0.06053405
## 127   127 7.513630 0.3005379 6.028438 0.1019516 0.02164159 0.06010594
## 128   128 7.513752 0.3005187 6.028276 0.1021789 0.02153731 0.06028771
## 129   129 7.515089 0.3003006 6.029891 0.1018954 0.02141692 0.06019921
## 130   130 7.514689 0.3003809 6.028983 0.1013580 0.02156691 0.05917609
## 131   131 7.514708 0.3003753 6.028429 0.1013665 0.02135761 0.05909878
## 132   132 7.516083 0.3001637 6.029147 0.1016555 0.02165498 0.05868222
## 133   133 7.516431 0.3001149 6.028960 0.1011161 0.02149746 0.05780008
## 134   134 7.517690 0.2999097 6.029814 0.1018319 0.02135776 0.05989409
## 135   135 7.517702 0.2999289 6.029074 0.1017248 0.02111938 0.06051169
## 136   136 7.518157 0.2998427 6.030305 0.1014238 0.02102514 0.06108282
## 137   137 7.517757 0.2999297 6.030181 0.1019799 0.02094653 0.06228239
## 138   138 7.517045 0.3000390 6.029667 0.1027932 0.02081884 0.06189751
## 139   139 7.516063 0.3002359 6.028344 0.1026755 0.02070114 0.06199464
## 140   140 7.517522 0.2999763 6.029956 0.1024743 0.02064481 0.06194764
## 141   141 7.519483 0.2996390 6.030881 0.1026182 0.02052642 0.06343300
## 142   142 7.519294 0.2996890 6.031170 0.1021568 0.02065282 0.06277534
## 143   143 7.518823 0.2997632 6.031473 0.1025441 0.02065702 0.06352936
## 144   144 7.519565 0.2996449 6.032798 0.1030805 0.02081231 0.06364566
## 145   145 7.519627 0.2996240 6.031746 0.1015581 0.02066747 0.06296099
## 146   146 7.519517 0.2996472 6.031152 0.1023821 0.02060756 0.06444646
## 147   147 7.518858 0.2997573 6.030186 0.1035761 0.02076447 0.06414505
## 148   148 7.519107 0.2997208 6.030530 0.1038064 0.02060571 0.06468727
## 149   149 7.520547 0.2994740 6.032297 0.1043311 0.02040829 0.06564955
## 150   150 7.518600 0.2998079 6.030471 0.1035622 0.02033565 0.06439091
## 151   151 7.520402 0.2994956 6.031925 0.1040387 0.02045204 0.06483379
## 152   152 7.520202 0.2995470 6.031333 0.1040515 0.02028831 0.06489592
## 153   153 7.520610 0.2995007 6.032067 0.1051110 0.02033285 0.06607826
## 154   154 7.521053 0.2994436 6.032060 0.1065442 0.02032910 0.06692107
## 155   155 7.521313 0.2993986 6.032413 0.1065159 0.02032982 0.06638972
## 156   156 7.521723 0.2993222 6.032689 0.1073371 0.02036017 0.06648148
## 157   157 7.523000 0.2991221 6.033497 0.1079159 0.02049118 0.06626059
## 158   158 7.523528 0.2990500 6.033301 0.1087719 0.02080115 0.06672511
## 159   159 7.523403 0.2990639 6.033945 0.1093470 0.02085636 0.06675413
## 160   160 7.523595 0.2990113 6.033420 0.1081599 0.02066453 0.06661192
## 161   161 7.522453 0.2992062 6.032641 0.1083264 0.02064215 0.06644176
## 162   162 7.521227 0.2994014 6.031580 0.1073415 0.02043135 0.06566283
## 163   163 7.521202 0.2994222 6.031710 0.1073914 0.02023327 0.06620060
## 164   164 7.520897 0.2994872 6.031651 0.1080092 0.02031412 0.06694421
## 165   165 7.519642 0.2997038 6.030683 0.1075389 0.02036778 0.06714883
## 166   166 7.518879 0.2998429 6.030515 0.1077509 0.02041933 0.06721970
## 167   167 7.519845 0.2996643 6.031492 0.1075427 0.02061256 0.06725612
## 168   168 7.519020 0.2998088 6.030716 0.1073489 0.02052365 0.06704497
## 169   169 7.518536 0.2998975 6.030977 0.1073848 0.02052666 0.06691760
## 170   170 7.519101 0.2997918 6.031995 0.1073436 0.02046539 0.06714953
## 171   171 7.519949 0.2996521 6.033029 0.1067274 0.02031049 0.06658036
## 172   172 7.519211 0.2997681 6.032805 0.1067635 0.02043361 0.06591621
## 173   173 7.518766 0.2998333 6.032578 0.1067360 0.02043129 0.06623556
## 174   174 7.518527 0.2998760 6.032518 0.1060126 0.02040578 0.06588960
## 175   175 7.518359 0.2999037 6.032467 0.1065693 0.02033673 0.06580136
## 176   176 7.518244 0.2999244 6.032537 0.1066673 0.02017576 0.06675605
## 177   177 7.518063 0.2999531 6.033107 0.1062654 0.02003956 0.06698244
## 178   178 7.517901 0.2999910 6.033461 0.1066969 0.02000254 0.06719302
## 179   179 7.516961 0.3001535 6.032418 0.1068486 0.02002172 0.06776576
## 180   180 7.517721 0.3000275 6.033410 0.1064531 0.01994890 0.06725342
## 181   181 7.517129 0.3001251 6.032988 0.1067106 0.01999003 0.06696726
## 182   182 7.517032 0.3001428 6.033213 0.1065569 0.01994929 0.06747090
## 183   183 7.516996 0.3001490 6.033505 0.1055645 0.01995687 0.06607726
## 184   184 7.516829 0.3001780 6.033253 0.1059749 0.01993253 0.06670456
## 185   185 7.516247 0.3002906 6.033561 0.1061081 0.01997384 0.06692649
## 186   186 7.515972 0.3003390 6.033279 0.1053207 0.02003371 0.06564276
## 187   187 7.516490 0.3002548 6.033733 0.1057648 0.01986562 0.06603702
## 188   188 7.516941 0.3001723 6.034350 0.1051718 0.01968720 0.06569393
## 189   189 7.516200 0.3003076 6.033551 0.1046592 0.01969228 0.06527864
## 190   190 7.515749 0.3003716 6.033592 0.1045888 0.01983579 0.06505447
## 191   191 7.515807 0.3003524 6.033265 0.1037707 0.01966872 0.06481934
## 192   192 7.516476 0.3002445 6.033829 0.1033629 0.01960414 0.06423199
## 193   193 7.516655 0.3002192 6.034383 0.1035111 0.01968692 0.06377296
## 194   194 7.516327 0.3002808 6.033780 0.1041488 0.01972615 0.06401268
## 195   195 7.516151 0.3003140 6.033395 0.1044211 0.01967452 0.06464955
## 196   196 7.515816 0.3003699 6.032761 0.1047470 0.01984963 0.06440233
## 197   197 7.515835 0.3003686 6.032666 0.1041556 0.01973065 0.06366082
## 198   198 7.516298 0.3002913 6.033040 0.1044994 0.01976181 0.06408948
## 199   199 7.515695 0.3003899 6.032339 0.1044972 0.01973181 0.06394727
## 200   200 7.516052 0.3003316 6.032764 0.1045228 0.01978865 0.06314028
## 201   201 7.516434 0.3002569 6.033279 0.1041104 0.01974387 0.06336424
## 202   202 7.516036 0.3003287 6.032685 0.1037726 0.01982285 0.06308160
## 203   203 7.515731 0.3003807 6.032575 0.1040710 0.01975302 0.06362317
## 204   204 7.515598 0.3004055 6.032077 0.1037319 0.01971876 0.06337082
## 205   205 7.515233 0.3004672 6.031563 0.1038177 0.01976092 0.06353169
## 206   206 7.515352 0.3004440 6.031727 0.1043152 0.01973340 0.06379095
## 207   207 7.515428 0.3004288 6.031680 0.1047298 0.01972795 0.06435322
## 208   208 7.515083 0.3004870 6.031357 0.1044821 0.01969237 0.06448936
## 209   209 7.515555 0.3004112 6.031945 0.1038538 0.01960536 0.06439284
## 210   210 7.515583 0.3004160 6.032081 0.1034344 0.01962462 0.06400060
## 211   211 7.514964 0.3005245 6.031261 0.1033488 0.01962963 0.06386220
## 212   212 7.514951 0.3005284 6.031379 0.1031330 0.01973268 0.06363633
## 213   213 7.514631 0.3005875 6.031082 0.1026906 0.01969156 0.06345399
## 214   214 7.514046 0.3006847 6.030390 0.1026815 0.01971050 0.06334877
## 215   215 7.514162 0.3006612 6.030339 0.1027709 0.01970342 0.06317489
## 216   216 7.514219 0.3006512 6.030363 0.1028697 0.01966107 0.06327584
## 217   217 7.513932 0.3007048 6.030002 0.1030286 0.01965270 0.06324843
## 218   218 7.513932 0.3007022 6.029944 0.1027652 0.01964202 0.06297415
## 219   219 7.513788 0.3007318 6.029779 0.1026667 0.01964564 0.06278031
## 220   220 7.513834 0.3007246 6.029968 0.1024377 0.01959591 0.06292932
## 221   221 7.514031 0.3006899 6.030045 0.1025461 0.01964483 0.06259663
## 222   222 7.514187 0.3006640 6.030012 0.1025661 0.01960866 0.06247228
## 223   223 7.513945 0.3007056 6.029742 0.1026614 0.01961009 0.06258606
## 224   224 7.513930 0.3007118 6.029786 0.1026362 0.01964959 0.06254641
## 225   225 7.513770 0.3007401 6.029577 0.1024963 0.01962453 0.06260621
## 226   226 7.513661 0.3007630 6.029545 0.1026501 0.01968100 0.06263315
## 227   227 7.513504 0.3007871 6.029471 0.1023918 0.01964694 0.06265606
## 228   228 7.513511 0.3007858 6.029552 0.1024293 0.01966040 0.06258867
## 229   229 7.513521 0.3007838 6.029502 0.1020795 0.01965917 0.06235755
## 230   230 7.513227 0.3008357 6.029330 0.1021693 0.01964450 0.06254157
## 231   231 7.513332 0.3008164 6.029465 0.1021261 0.01962689 0.06255784
## 232   232 7.513532 0.3007804 6.029636 0.1020927 0.01962129 0.06254178
## 233   233 7.513489 0.3007880 6.029626 0.1020469 0.01962357 0.06249576
## 234   234 7.513532 0.3007808 6.029729 0.1020161 0.01962047 0.06254948
## 235   235 7.513564 0.3007764 6.029752 0.1019786 0.01963529 0.06253389
## 236   236 7.513592 0.3007697 6.029811 0.1019815 0.01962200 0.06254947
## 237   237 7.513596 0.3007691 6.029771 0.1019959 0.01961401 0.06255203
## 238   238 7.513563 0.3007753 6.029785 0.1018913 0.01961497 0.06243028
## 239   239 7.513563 0.3007754 6.029787 0.1019127 0.01962115 0.06241481
## 240   240 7.513584 0.3007718 6.029796 0.1019202 0.01962246 0.06241429
##    nvmax
## 25    25
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.733025e+01 -1.487732e-02  3.352477e+00  1.410604e-01  9.451043e-01 
##           x10           x11           x16           x17           x21 
##  4.253677e-01  5.771201e+07  2.555669e-01  4.181964e-01  3.566003e-02 
##         stat4        stat13        stat14        stat23        stat25 
## -1.636272e-01 -1.831055e-01 -3.085334e-01  1.948212e-01 -1.437340e-01 
##        stat38        stat41        stat60        stat85        stat98 
##  1.589337e-01 -1.702179e-01  1.948465e-01 -1.431581e-01  8.584054e-01 
##       stat110       stat128       stat144       stat146       stat149 
## -8.961450e-01 -1.604357e-01  1.596797e-01 -1.498132e-01 -2.043875e-01 
##      sqrt.x18 
##  7.454872e+00

Test

# Evaluate the caret-trained forward-selection (leapForward) model on the
# hold-out test set; draws prediction limits on the diagnostic plot.
if (algo.forward.caret) {
  test.model(
    model.forward, data.test,
    method = "leapForward",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.3   120.7   124.3   124.1   127.8   138.1 
## [1] "leapForward  Test MSE: 91.2164226586688"

Backward Elimination

Train

# Fit backward elimination via stepwise AIC, starting from the full model.
# NOTE: this can be very slow when the model has many predictors.
if (algo.backward) {
  t1 <- Sys.time()

  model.backward <- step(model.full, data = data.train,
                         direction = "backward", trace = 0)
  print(summary(model.backward))
  # saveRDS(model.backward, file = "model_backward.rds")

  t2 <- Sys.time()
  # format(t2 - t1) keeps the difftime units (secs/mins); a bare difference
  # pasted into a string silently drops them, making the timing ambiguous.
  print(paste0("Time taken for Backward Elimination: ", format(t2 - t1)))

  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the stepwise backward-elimination model on the hold-out test set.
# BUG FIX: original referenced undefined object `model.backard` (typo for
# `model.backward`), which would fail with "object not found" at runtime.
if (algo.backward) {
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Train backward elimination with cross-validation via caret (leapBackward),
# using the full training set. Seed fixed for reproducible CV folds.
if (algo.backward.caret) {
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapBackward",
    feature.names = feature.names
  )
  # Unpack the fitted model and its identifier for the downstream test chunk.
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 19 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.205848 0.1070230 7.793298 0.4837937 0.02121144 0.2343155
## 2       2  9.978578 0.1456242 7.594919 0.4865059 0.01542500 0.2210630
## 3       3  9.869319 0.1645156 7.472217 0.4757917 0.01700325 0.2032220
## 4       4  9.705345 0.1918124 7.255723 0.4773872 0.01219753 0.1920870
## 5       5  9.623121 0.2056678 7.191041 0.5001426 0.01530975 0.2093101
## 6       6  9.615614 0.2067906 7.190418 0.4962592 0.01340840 0.1969631
## 7       7  9.599094 0.2094876 7.178668 0.4953228 0.01391436 0.2006617
## 8       8  9.581185 0.2124654 7.171670 0.4983385 0.01423833 0.2054035
## 9       9  9.573978 0.2136032 7.160477 0.4946051 0.01182847 0.1967355
## 10     10  9.566457 0.2148079 7.156153 0.4890367 0.01057451 0.1936818
## 11     11  9.569889 0.2143339 7.162311 0.4933912 0.01197829 0.2041986
## 12     12  9.572535 0.2139549 7.164093 0.4913304 0.01123655 0.2012837
## 13     13  9.571512 0.2141605 7.161750 0.4913776 0.01141375 0.1948090
## 14     14  9.569202 0.2145821 7.160108 0.4886390 0.01088688 0.1991866
## 15     15  9.568774 0.2146047 7.157827 0.4818420 0.01068948 0.1909961
## 16     16  9.565261 0.2152305 7.160380 0.4895184 0.01216199 0.1976081
## 17     17  9.567334 0.2149625 7.164737 0.4913768 0.01360563 0.2009011
## 18     18  9.567337 0.2149964 7.164855 0.4874672 0.01361163 0.1994390
## 19     19  9.562362 0.2158963 7.158834 0.4926740 0.01415188 0.2033848
## 20     20  9.571302 0.2145096 7.165829 0.4932075 0.01386646 0.2024829
## 21     21  9.567416 0.2152074 7.159803 0.4941473 0.01404452 0.2018575
## 22     22  9.571406 0.2145969 7.160732 0.4957266 0.01458413 0.2060193
## 23     23  9.575463 0.2139494 7.163199 0.4969331 0.01432135 0.2027895
## 24     24  9.583368 0.2127542 7.167614 0.4983340 0.01469583 0.2017528
## 25     25  9.582286 0.2129563 7.167646 0.4950203 0.01322326 0.2028330
## 26     26  9.590248 0.2117843 7.176497 0.4999595 0.01418107 0.2090227
## 27     27  9.595746 0.2109382 7.182070 0.4973136 0.01393394 0.2086277
## 28     28  9.598859 0.2104611 7.186796 0.4985010 0.01415490 0.2086077
## 29     29  9.594474 0.2111133 7.184776 0.4965284 0.01417496 0.2039828
## 30     30  9.596270 0.2108588 7.183074 0.4922937 0.01378263 0.2003609
## 31     31  9.596107 0.2109165 7.181586 0.4940167 0.01456892 0.1995700
## 32     32  9.599148 0.2104233 7.183031 0.4919426 0.01432897 0.1950352
## 33     33  9.605265 0.2094508 7.187569 0.4850093 0.01363172 0.1890039
## 34     34  9.611314 0.2085646 7.194728 0.4857052 0.01354462 0.1924132
## 35     35  9.617041 0.2077382 7.198578 0.4857869 0.01397109 0.1914110
## 36     36  9.615917 0.2079240 7.196450 0.4812063 0.01334848 0.1876855
## 37     37  9.619457 0.2073523 7.199992 0.4796731 0.01333185 0.1840511
## 38     38  9.624640 0.2065558 7.203557 0.4839502 0.01363285 0.1884245
## 39     39  9.625910 0.2063444 7.205746 0.4843659 0.01366364 0.1855229
## 40     40  9.627995 0.2060514 7.205733 0.4880815 0.01350262 0.1872512
## 41     41  9.629037 0.2059215 7.208334 0.4927490 0.01329750 0.1911551
## 42     42  9.630054 0.2057674 7.209988 0.4917740 0.01310605 0.1928935
## 43     43  9.628893 0.2059596 7.208163 0.4967871 0.01409955 0.1995894
## 44     44  9.630327 0.2057774 7.211031 0.4962014 0.01416086 0.1998333
## 45     45  9.626998 0.2063329 7.207696 0.4929560 0.01347146 0.1956042
## 46     46  9.628639 0.2061246 7.209790 0.4914027 0.01335489 0.1952665
## 47     47  9.628160 0.2062468 7.210923 0.4949744 0.01365686 0.1945632
## 48     48  9.632717 0.2055818 7.216487 0.4980589 0.01385014 0.1971969
## 49     49  9.635753 0.2051288 7.220327 0.4947335 0.01368369 0.1973255
## 50     50  9.639846 0.2044955 7.226929 0.4915806 0.01320375 0.1943038
## 51     51  9.640975 0.2043166 7.228632 0.4902914 0.01260944 0.1915218
## 52     52  9.640461 0.2044317 7.226599 0.4892529 0.01221077 0.1931629
## 53     53  9.647393 0.2034604 7.232237 0.4895990 0.01177643 0.1951494
## 54     54  9.648265 0.2033883 7.235465 0.4905796 0.01197122 0.1973725
## 55     55  9.652948 0.2027337 7.239494 0.4909526 0.01209016 0.1958505
## 56     56  9.650763 0.2031109 7.236628 0.4898040 0.01190569 0.1979289
## 57     57  9.650222 0.2031639 7.235884 0.4903489 0.01206302 0.1978371
## 58     58  9.651924 0.2029351 7.237053 0.4905861 0.01223864 0.1967341
## 59     59  9.655341 0.2024556 7.241885 0.4901514 0.01253511 0.1947968
## 60     60  9.655501 0.2024204 7.240196 0.4862630 0.01212228 0.1924294
## 61     61  9.654554 0.2026047 7.240115 0.4882100 0.01228887 0.1943175
## 62     62  9.657116 0.2022238 7.241827 0.4848064 0.01230450 0.1929553
## 63     63  9.657260 0.2022973 7.242278 0.4860896 0.01244360 0.1911007
## 64     64  9.661599 0.2016982 7.246358 0.4879359 0.01234891 0.1946322
## 65     65  9.662127 0.2016721 7.245871 0.4869920 0.01241064 0.1928998
## 66     66  9.659061 0.2020945 7.245111 0.4858542 0.01193031 0.1918886
## 67     67  9.662760 0.2015754 7.249590 0.4889833 0.01295215 0.1944322
## 68     68  9.663287 0.2015348 7.250182 0.4905008 0.01329477 0.1949880
## 69     69  9.663812 0.2014996 7.249044 0.4920350 0.01387285 0.1946294
## 70     70  9.664004 0.2014832 7.248936 0.4908428 0.01368468 0.1955668
## 71     71  9.665785 0.2012074 7.249414 0.4887005 0.01348347 0.1934958
## 72     72  9.662739 0.2016769 7.245371 0.4902772 0.01352472 0.1948359
## 73     73  9.659178 0.2021951 7.243435 0.4873207 0.01282282 0.1917531
## 74     74  9.659354 0.2021668 7.243297 0.4886869 0.01312672 0.1913921
## 75     75  9.662708 0.2016938 7.247164 0.4873989 0.01310814 0.1928876
## 76     76  9.660173 0.2020983 7.246713 0.4890875 0.01327009 0.1980937
## 77     77  9.657581 0.2025171 7.243160 0.4898856 0.01327790 0.1972201
## 78     78  9.655852 0.2028058 7.243931 0.4897605 0.01299943 0.1976480
## 79     79  9.656670 0.2027133 7.244318 0.4887119 0.01296953 0.1962015
## 80     80  9.656751 0.2026972 7.244483 0.4853217 0.01261353 0.1946568
## 81     81  9.657590 0.2026273 7.242976 0.4838482 0.01262482 0.1952410
## 82     82  9.658435 0.2025202 7.244680 0.4834751 0.01235412 0.1928695
## 83     83  9.662127 0.2019762 7.249442 0.4816685 0.01228831 0.1927000
## 84     84  9.661492 0.2020600 7.249307 0.4776277 0.01170593 0.1900698
## 85     85  9.662195 0.2019749 7.249292 0.4789569 0.01181229 0.1914320
## 86     86  9.664893 0.2016151 7.250713 0.4840073 0.01257959 0.1940915
## 87     87  9.663427 0.2018458 7.249138 0.4799809 0.01217629 0.1897677
## 88     88  9.662724 0.2019658 7.248019 0.4804455 0.01195451 0.1870421
## 89     89  9.662850 0.2019935 7.245907 0.4812640 0.01241939 0.1872390
## 90     90  9.663355 0.2019261 7.245993 0.4847845 0.01294216 0.1874928
## 91     91  9.665010 0.2017166 7.248325 0.4858344 0.01288464 0.1878946
## 92     92  9.664952 0.2017875 7.246514 0.4849091 0.01274396 0.1885734
## 93     93  9.666339 0.2015984 7.247282 0.4823479 0.01256206 0.1859807
## 94     94  9.664331 0.2018923 7.246431 0.4794122 0.01217075 0.1832512
## 95     95  9.665127 0.2017707 7.247941 0.4791639 0.01237723 0.1828138
## 96     96  9.668477 0.2012688 7.251497 0.4815496 0.01256527 0.1859046
## 97     97  9.666978 0.2014895 7.252613 0.4827053 0.01265188 0.1864985
## 98     98  9.667450 0.2014402 7.253908 0.4822723 0.01278868 0.1866056
## 99     99  9.668031 0.2013767 7.253829 0.4824339 0.01313630 0.1871715
## 100   100  9.668550 0.2013107 7.254471 0.4819541 0.01308857 0.1865511
## 101   101  9.671818 0.2008735 7.257933 0.4825595 0.01328047 0.1864006
## 102   102  9.672933 0.2007423 7.261619 0.4794726 0.01308231 0.1835565
## 103   103  9.675260 0.2003845 7.263414 0.4801250 0.01310245 0.1842134
## 104   104  9.678113 0.1999659 7.267006 0.4769741 0.01263651 0.1805598
## 105   105  9.678647 0.1998938 7.267360 0.4776166 0.01280516 0.1795283
## 106   106  9.679320 0.1997921 7.267498 0.4783392 0.01281347 0.1800221
## 107   107  9.679592 0.1997363 7.269018 0.4787866 0.01275670 0.1809555
## 108   108  9.683558 0.1991634 7.270950 0.4779529 0.01267542 0.1801556
## 109   109  9.683632 0.1991690 7.270669 0.4756294 0.01276463 0.1789218
## 110   110  9.684196 0.1991168 7.269010 0.4745476 0.01245626 0.1785191
## 111   111  9.682997 0.1992939 7.267730 0.4761805 0.01249970 0.1797205
## 112   112  9.685357 0.1990080 7.269319 0.4731941 0.01245727 0.1770993
## 113   113  9.684323 0.1991568 7.268978 0.4729659 0.01238861 0.1777692
## 114   114  9.686342 0.1988415 7.269871 0.4714479 0.01231594 0.1777333
## 115   115  9.683585 0.1992545 7.269452 0.4712946 0.01201555 0.1761478
## 116   116  9.683431 0.1992946 7.269243 0.4710566 0.01235242 0.1779592
## 117   117  9.685318 0.1990309 7.270412 0.4706825 0.01221092 0.1757824
## 118   118  9.688069 0.1986369 7.273363 0.4689140 0.01223037 0.1748891
## 119   119  9.689423 0.1984399 7.273737 0.4685242 0.01242421 0.1756839
## 120   120  9.689907 0.1983832 7.273334 0.4709158 0.01255156 0.1773673
## 121   121  9.689990 0.1983714 7.273247 0.4699230 0.01231377 0.1762457
## 122   122  9.691768 0.1980836 7.274326 0.4676699 0.01237105 0.1748183
## 123   123  9.693526 0.1978605 7.277054 0.4688506 0.01266815 0.1778559
## 124   124  9.695159 0.1976130 7.277978 0.4719612 0.01297496 0.1817237
## 125   125  9.695284 0.1976208 7.278316 0.4739066 0.01333068 0.1824749
## 126   126  9.696568 0.1974569 7.280158 0.4732186 0.01338717 0.1821526
## 127   127  9.695982 0.1975423 7.279177 0.4769746 0.01364433 0.1848789
## 128   128  9.695521 0.1976446 7.277701 0.4759199 0.01356594 0.1845989
## 129   129  9.696429 0.1974800 7.279761 0.4757679 0.01338235 0.1846368
## 130   130  9.698805 0.1971310 7.282020 0.4743382 0.01339969 0.1857527
## 131   131  9.699106 0.1971150 7.281626 0.4723020 0.01334117 0.1835792
## 132   132  9.698321 0.1972344 7.280920 0.4719988 0.01340341 0.1824222
## 133   133  9.696322 0.1975384 7.277455 0.4733217 0.01341779 0.1833088
## 134   134  9.696671 0.1974814 7.277331 0.4737549 0.01334922 0.1838861
## 135   135  9.695571 0.1976440 7.275539 0.4746006 0.01332339 0.1842961
## 136   136  9.693744 0.1979135 7.273962 0.4764273 0.01358403 0.1856467
## 137   137  9.694039 0.1978612 7.274334 0.4738326 0.01345097 0.1848245
## 138   138  9.695144 0.1977176 7.276151 0.4720927 0.01312571 0.1817444
## 139   139  9.694893 0.1977780 7.275813 0.4718483 0.01298098 0.1833086
## 140   140  9.696456 0.1975507 7.276073 0.4731874 0.01302806 0.1824830
## 141   141  9.697355 0.1973908 7.277989 0.4733178 0.01298154 0.1821315
## 142   142  9.696811 0.1974788 7.277902 0.4712072 0.01290822 0.1821340
## 143   143  9.696825 0.1974950 7.278978 0.4713992 0.01307407 0.1832113
## 144   144  9.695340 0.1977258 7.278064 0.4720461 0.01302724 0.1828675
## 145   145  9.696820 0.1975326 7.280327 0.4721200 0.01309267 0.1829917
## 146   146  9.697153 0.1974531 7.280674 0.4724268 0.01296248 0.1825087
## 147   147  9.697541 0.1974018 7.280183 0.4737874 0.01315245 0.1834717
## 148   148  9.698023 0.1973398 7.280737 0.4747850 0.01308363 0.1834922
## 149   149  9.699211 0.1971698 7.280399 0.4744261 0.01306115 0.1830394
## 150   150  9.699522 0.1971329 7.280516 0.4730413 0.01304062 0.1808626
## 151   151  9.700466 0.1969917 7.282134 0.4727899 0.01285515 0.1792043
## 152   152  9.701099 0.1969241 7.282114 0.4738467 0.01298404 0.1787989
## 153   153  9.702382 0.1967478 7.283867 0.4735004 0.01302301 0.1809186
## 154   154  9.702715 0.1967085 7.283595 0.4733836 0.01305025 0.1811759
## 155   155  9.703643 0.1966020 7.284516 0.4749992 0.01324381 0.1819034
## 156   156  9.704434 0.1964620 7.285862 0.4746833 0.01305472 0.1824504
## 157   157  9.703582 0.1965975 7.284882 0.4752717 0.01300646 0.1829955
## 158   158  9.704822 0.1964176 7.286453 0.4770487 0.01307778 0.1849310
## 159   159  9.704956 0.1963881 7.286847 0.4769912 0.01289360 0.1849462
## 160   160  9.704922 0.1963727 7.286666 0.4761648 0.01272078 0.1843181
## 161   161  9.706375 0.1961559 7.287400 0.4751833 0.01259623 0.1819531
## 162   162  9.704820 0.1963931 7.287269 0.4759895 0.01258746 0.1826285
## 163   163  9.705742 0.1962494 7.288158 0.4752321 0.01260972 0.1823770
## 164   164  9.706214 0.1961806 7.288503 0.4757525 0.01269116 0.1823876
## 165   165  9.706052 0.1962390 7.288525 0.4759545 0.01283614 0.1820709
## 166   166  9.706635 0.1961672 7.289471 0.4743515 0.01268739 0.1806905
## 167   167  9.706358 0.1962067 7.289008 0.4740090 0.01282082 0.1810591
## 168   168  9.706574 0.1961781 7.289704 0.4730141 0.01286048 0.1797520
## 169   169  9.706744 0.1961576 7.289323 0.4730797 0.01289013 0.1795912
## 170   170  9.707616 0.1960468 7.290655 0.4736049 0.01297671 0.1798373
## 171   171  9.708704 0.1959023 7.291018 0.4738181 0.01301194 0.1806541
## 172   172  9.708512 0.1959377 7.291878 0.4730865 0.01291264 0.1793889
## 173   173  9.708435 0.1959423 7.292168 0.4718825 0.01275255 0.1787457
## 174   174  9.708264 0.1959788 7.293230 0.4725473 0.01267767 0.1803379
## 175   175  9.709217 0.1958498 7.294008 0.4722957 0.01269188 0.1805040
## 176   176  9.709692 0.1957876 7.293682 0.4724831 0.01257195 0.1806462
## 177   177  9.709761 0.1957814 7.293598 0.4721962 0.01249503 0.1800756
## 178   178  9.710165 0.1957496 7.294146 0.4724505 0.01243371 0.1799425
## 179   179  9.710154 0.1957548 7.294527 0.4718398 0.01235600 0.1799241
## 180   180  9.709438 0.1958595 7.293791 0.4729215 0.01241823 0.1809620
## 181   181  9.710757 0.1956829 7.295140 0.4727041 0.01242298 0.1811684
## 182   182  9.710595 0.1956963 7.294920 0.4726998 0.01239907 0.1801088
## 183   183  9.711245 0.1956000 7.294942 0.4718913 0.01227424 0.1797826
## 184   184  9.710962 0.1956467 7.294572 0.4713648 0.01225133 0.1788006
## 185   185  9.710800 0.1956699 7.294267 0.4714679 0.01227641 0.1783845
## 186   186  9.710720 0.1956717 7.294404 0.4711128 0.01232655 0.1786054
## 187   187  9.711552 0.1955414 7.295327 0.4702716 0.01237305 0.1770586
## 188   188  9.711671 0.1955185 7.295777 0.4687530 0.01225462 0.1765300
## 189   189  9.710811 0.1956323 7.295167 0.4686988 0.01224806 0.1773314
## 190   190  9.710782 0.1956394 7.295854 0.4678524 0.01222420 0.1762475
## 191   191  9.710639 0.1956503 7.295885 0.4675023 0.01219775 0.1766837
## 192   192  9.710939 0.1956028 7.296444 0.4679621 0.01223144 0.1773056
## 193   193  9.710324 0.1957035 7.295521 0.4681368 0.01226489 0.1769850
## 194   194  9.710775 0.1956320 7.296402 0.4672165 0.01213767 0.1765839
## 195   195  9.711014 0.1955901 7.296778 0.4679040 0.01217577 0.1769962
## 196   196  9.711544 0.1955069 7.297111 0.4674341 0.01209792 0.1768215
## 197   197  9.711157 0.1955632 7.297203 0.4679034 0.01213014 0.1774002
## 198   198  9.711459 0.1955178 7.297382 0.4676873 0.01212118 0.1780176
## 199   199  9.711900 0.1954421 7.297893 0.4680823 0.01208655 0.1783009
## 200   200  9.712004 0.1954235 7.297919 0.4682057 0.01205710 0.1783415
## 201   201  9.711954 0.1954228 7.297856 0.4677251 0.01197920 0.1782884
## 202   202  9.711901 0.1954229 7.298340 0.4674532 0.01190788 0.1782757
## 203   203  9.712623 0.1953239 7.298382 0.4671891 0.01185634 0.1783254
## 204   204  9.712719 0.1953042 7.298505 0.4673183 0.01182433 0.1784031
## 205   205  9.712703 0.1953109 7.298014 0.4670963 0.01182599 0.1783063
## 206   206  9.712524 0.1953378 7.297391 0.4676790 0.01191285 0.1787110
## 207   207  9.712771 0.1953050 7.297171 0.4676780 0.01194307 0.1790044
## 208   208  9.712553 0.1953393 7.296363 0.4676571 0.01190078 0.1787989
## 209   209  9.712615 0.1953323 7.296752 0.4676331 0.01194507 0.1788300
## 210   210  9.713028 0.1952757 7.297286 0.4670797 0.01196909 0.1785525
## 211   211  9.712817 0.1953113 7.297561 0.4679047 0.01196891 0.1789645
## 212   212  9.713661 0.1951954 7.297866 0.4674915 0.01194017 0.1790801
## 213   213  9.714192 0.1951190 7.298356 0.4672243 0.01199485 0.1787955
## 214   214  9.713905 0.1951595 7.298138 0.4673039 0.01203417 0.1786011
## 215   215  9.714339 0.1950940 7.298358 0.4674120 0.01202189 0.1787510
## 216   216  9.714256 0.1951055 7.298302 0.4678542 0.01204842 0.1789006
## 217   217  9.714555 0.1950657 7.298322 0.4680558 0.01208133 0.1793217
## 218   218  9.714872 0.1950193 7.298600 0.4680072 0.01207305 0.1790553
## 219   219  9.715020 0.1949969 7.298801 0.4680128 0.01202056 0.1790669
## 220   220  9.715131 0.1949851 7.298787 0.4680137 0.01203149 0.1789404
## 221   221  9.714983 0.1950076 7.298457 0.4680670 0.01203679 0.1790190
## 222   222  9.715141 0.1949809 7.298526 0.4681280 0.01203892 0.1789291
## 223   223  9.715056 0.1949918 7.298249 0.4682563 0.01201705 0.1791098
## 224   224  9.715004 0.1949958 7.298306 0.4683078 0.01204532 0.1789677
## 225   225  9.714932 0.1950083 7.298194 0.4683754 0.01207181 0.1791277
## 226   226  9.714810 0.1950258 7.298019 0.4683111 0.01207395 0.1789807
## 227   227  9.714748 0.1950348 7.297978 0.4683425 0.01206339 0.1788580
## 228   228  9.714719 0.1950403 7.297953 0.4682105 0.01204776 0.1788258
## 229   229  9.714697 0.1950467 7.297814 0.4681191 0.01202929 0.1787875
## 230   230  9.714758 0.1950402 7.297953 0.4682279 0.01206153 0.1788916
## 231   231  9.714536 0.1950721 7.297934 0.4683308 0.01207297 0.1788853
## 232   232  9.714408 0.1950926 7.297743 0.4683593 0.01207440 0.1787876
## 233   233  9.714291 0.1951097 7.297615 0.4683927 0.01207864 0.1787479
## 234   234  9.714310 0.1951048 7.297683 0.4683408 0.01206610 0.1786733
## 235   235  9.714420 0.1950895 7.297781 0.4682275 0.01204521 0.1785377
## 236   236  9.714438 0.1950871 7.297812 0.4682926 0.01205669 0.1786077
## 237   237  9.714535 0.1950727 7.297864 0.4683203 0.01206595 0.1786126
## 238   238  9.714524 0.1950741 7.297823 0.4682732 0.01206230 0.1785109
## 239   239  9.714518 0.1950747 7.297805 0.4682435 0.01206008 0.1784736
## 240   240  9.714503 0.1950768 7.297804 0.4682653 0.01206096 0.1784811
##    nvmax
## 19    19
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.941836e+01 -1.276249e-02  3.235631e+00  1.285802e-01  9.650356e-01 
##           x10           x11           x14           x16           x17 
##  3.156375e-01  5.382116e+07 -2.523649e-01  2.760891e-01  4.401452e-01 
##           x21        stat13        stat14        stat24        stat60 
##  3.914063e-02 -1.817820e-01 -2.758714e-01 -1.746312e-01  1.952430e-01 
##        stat98       stat110       stat144       stat149      sqrt.x18 
##  9.490836e-01 -9.372953e-01  1.657180e-01 -2.076103e-01  7.619056e+00

Test

# Evaluate the caret-trained backward-elimination (leapBackward) model on the
# hold-out test set; draws prediction limits on the diagnostic plot.
if (algo.backward.caret) {
  test.model(
    model.backward, data.test,
    method = "leapBackward",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.1   121.9   125.5   125.3   129.0   140.3 
## [1] "leapBackward  Test MSE: 90.3031382273639"

Backward Elimination with CV (w/ filtered train)

Train

# Train backward elimination with cross-validation via caret (leapBackward),
# this time on the filtered training set (data.train2). Seed fixed so the
# CV folds match the full-train run above.
if (algo.backward.caret) {
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapBackward",
    feature.names = feature.names
  )
  # Unpack the fitted model and its identifier for the downstream test chunk.
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 25 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD      MAESD
## 1       1 8.270268 0.1500471 6.662698 0.1634449 0.02514052 0.11664558
## 2       2 7.968058 0.2109679 6.442396 0.1268344 0.02841881 0.10963931
## 3       3 7.845842 0.2351572 6.312228 0.1026749 0.02303243 0.08405260
## 4       4 7.665810 0.2697308 6.108489 0.1030712 0.02687427 0.08085558
## 5       5 7.567646 0.2882837 6.030670 0.1017884 0.02394369 0.06889467
## 6       6 7.533046 0.2946383 6.009967 0.1156649 0.02350019 0.07647690
## 7       7 7.535902 0.2941827 6.019534 0.1327010 0.02479979 0.08794715
## 8       8 7.508355 0.2992123 6.004663 0.1307841 0.02582231 0.07862328
## 9       9 7.494092 0.3018055 5.993913 0.1243981 0.02715968 0.06523736
## 10     10 7.462905 0.3076898 5.979216 0.1372392 0.02736091 0.07700172
## 11     11 7.458523 0.3084401 5.980278 0.1231068 0.02589088 0.07232327
## 12     12 7.449626 0.3101415 5.974128 0.1254919 0.02653904 0.07458857
## 13     13 7.454874 0.3092337 5.979360 0.1242300 0.02825630 0.07645168
## 14     14 7.453214 0.3095297 5.976030 0.1223252 0.02822013 0.07732297
## 15     15 7.444585 0.3111541 5.968770 0.1215171 0.02886534 0.07060026
## 16     16 7.443081 0.3114727 5.970313 0.1189044 0.02738134 0.06978336
## 17     17 7.438248 0.3122982 5.965056 0.1204191 0.02681958 0.07170486
## 18     18 7.439011 0.3122122 5.967540 0.1175664 0.02705692 0.06333219
## 19     19 7.442122 0.3115874 5.970148 0.1151504 0.02686571 0.06105625
## 20     20 7.444540 0.3111446 5.968736 0.1128034 0.02619355 0.06510858
## 21     21 7.440622 0.3118520 5.966512 0.1103750 0.02631120 0.06815638
## 22     22 7.442251 0.3115015 5.969164 0.1189506 0.02697655 0.07921281
## 23     23 7.437949 0.3123222 5.971178 0.1195624 0.02669437 0.08042746
## 24     24 7.434140 0.3130293 5.967030 0.1255552 0.02619411 0.08448472
## 25     25 7.427174 0.3142952 5.964074 0.1294530 0.02629751 0.08569100
## 26     26 7.431226 0.3135857 5.966708 0.1303466 0.02536487 0.08736138
## 27     27 7.427822 0.3142255 5.965260 0.1311610 0.02542720 0.09038605
## 28     28 7.429214 0.3139957 5.962745 0.1334063 0.02596286 0.09184781
## 29     29 7.431424 0.3135624 5.962761 0.1297904 0.02494089 0.08914026
## 30     30 7.441750 0.3117961 5.970501 0.1259454 0.02486100 0.08599562
## 31     31 7.445583 0.3111120 5.977874 0.1253871 0.02505912 0.09094178
## 32     32 7.446121 0.3110165 5.977999 0.1249858 0.02438610 0.09250046
## 33     33 7.445301 0.3111792 5.976954 0.1250273 0.02418400 0.09163260
## 34     34 7.446546 0.3109881 5.978967 0.1264497 0.02482255 0.09475348
## 35     35 7.441776 0.3118613 5.973790 0.1249731 0.02543126 0.09147111
## 36     36 7.440601 0.3120755 5.973329 0.1193922 0.02529089 0.08766689
## 37     37 7.441438 0.3119327 5.973926 0.1216543 0.02555754 0.09161323
## 38     38 7.441237 0.3119847 5.973344 0.1206545 0.02586364 0.09033567
## 39     39 7.443702 0.3115889 5.974059 0.1235022 0.02572046 0.09159439
## 40     40 7.444618 0.3114191 5.973027 0.1206947 0.02484132 0.08959653
## 41     41 7.443342 0.3116232 5.971038 0.1232758 0.02556667 0.08886533
## 42     42 7.447677 0.3108500 5.969916 0.1238124 0.02510430 0.09102123
## 43     43 7.447273 0.3109650 5.969148 0.1228627 0.02515700 0.08804876
## 44     44 7.450414 0.3104280 5.970952 0.1228812 0.02514916 0.08890584
## 45     45 7.453345 0.3099460 5.973643 0.1177915 0.02515479 0.08436014
## 46     46 7.453751 0.3099096 5.974773 0.1180242 0.02541496 0.08357601
## 47     47 7.453900 0.3099192 5.974828 0.1171634 0.02559531 0.08414982
## 48     48 7.460176 0.3088358 5.980846 0.1193960 0.02539359 0.08825626
## 49     49 7.458241 0.3091736 5.980240 0.1169120 0.02465835 0.08774326
## 50     50 7.458086 0.3092255 5.980311 0.1137444 0.02450535 0.08368574
## 51     51 7.460524 0.3088277 5.981566 0.1127413 0.02364404 0.08443702
## 52     52 7.465055 0.3080614 5.986789 0.1127399 0.02371416 0.08231407
## 53     53 7.465296 0.3080425 5.988935 0.1134049 0.02357415 0.08301551
## 54     54 7.472170 0.3068795 5.996714 0.1107544 0.02299286 0.08027537
## 55     55 7.475868 0.3062642 6.001846 0.1120293 0.02341960 0.08118716
## 56     56 7.473883 0.3066488 6.000088 0.1092365 0.02334091 0.07944026
## 57     57 7.471969 0.3070440 5.999743 0.1080261 0.02264240 0.07666770
## 58     58 7.475036 0.3065419 6.001151 0.1060680 0.02216629 0.07565391
## 59     59 7.472164 0.3070758 5.999577 0.1055306 0.02214947 0.07483212
## 60     60 7.474729 0.3066672 6.001527 0.1053836 0.02251541 0.07327837
## 61     61 7.478230 0.3060490 6.005019 0.1072885 0.02251058 0.07460191
## 62     62 7.480669 0.3056922 6.009906 0.1077276 0.02236505 0.07384589
## 63     63 7.481204 0.3056221 6.011057 0.1086292 0.02256147 0.07307206
## 64     64 7.480801 0.3057051 6.011347 0.1098353 0.02253913 0.07506009
## 65     65 7.479166 0.3060051 6.009918 0.1120246 0.02239136 0.07671213
## 66     66 7.480216 0.3057959 6.009439 0.1120182 0.02229617 0.07766787
## 67     67 7.478825 0.3060437 6.007099 0.1101808 0.02214942 0.07722189
## 68     68 7.480875 0.3056969 6.006979 0.1085868 0.02209887 0.07822039
## 69     69 7.484729 0.3050401 6.008843 0.1096969 0.02279888 0.07973403
## 70     70 7.480678 0.3057420 6.004861 0.1081265 0.02234294 0.07816393
## 71     71 7.483027 0.3053382 6.004790 0.1064108 0.02203522 0.07732623
## 72     72 7.482976 0.3053583 6.006413 0.1078810 0.02204946 0.07859592
## 73     73 7.482595 0.3054271 6.005388 0.1059319 0.02201453 0.07661897
## 74     74 7.483837 0.3052251 6.005435 0.1049521 0.02197694 0.07598431
## 75     75 7.484748 0.3050567 6.006019 0.1064646 0.02162645 0.07539546
## 76     76 7.487586 0.3045650 6.007583 0.1054888 0.02136606 0.07624709
## 77     77 7.487752 0.3045423 6.009507 0.1081814 0.02222531 0.07730344
## 78     78 7.490005 0.3041802 6.011585 0.1124504 0.02200105 0.08274354
## 79     79 7.493171 0.3036050 6.013108 0.1159944 0.02228666 0.08201694
## 80     80 7.493554 0.3035575 6.013235 0.1152690 0.02239749 0.08072814
## 81     81 7.492880 0.3037017 6.012753 0.1162927 0.02304598 0.08118578
## 82     82 7.493149 0.3036770 6.014960 0.1148346 0.02314136 0.08028994
## 83     83 7.493816 0.3035515 6.015127 0.1130597 0.02253932 0.07954361
## 84     84 7.498473 0.3027360 6.018249 0.1110006 0.02237901 0.07653606
## 85     85 7.502141 0.3021105 6.020967 0.1099354 0.02245952 0.07610001
## 86     86 7.501710 0.3021672 6.019673 0.1095919 0.02262022 0.07665509
## 87     87 7.501871 0.3021578 6.021237 0.1099596 0.02307588 0.07576896
## 88     88 7.502050 0.3021357 6.021830 0.1087324 0.02287321 0.07358517
## 89     89 7.504912 0.3016265 6.024632 0.1077560 0.02287624 0.07214764
## 90     90 7.506408 0.3014081 6.023257 0.1098940 0.02312486 0.07332067
## 91     91 7.506526 0.3014133 6.024656 0.1095343 0.02357754 0.07072056
## 92     92 7.506693 0.3013937 6.025015 0.1086677 0.02342937 0.06887131
## 93     93 7.506026 0.3014844 6.024724 0.1089327 0.02271143 0.06977705
## 94     94 7.506865 0.3013203 6.026430 0.1103625 0.02292282 0.07067471
## 95     95 7.508889 0.3009854 6.028271 0.1113812 0.02331627 0.07214292
## 96     96 7.510282 0.3007904 6.029589 0.1128518 0.02308720 0.07422661
## 97     97 7.509852 0.3008828 6.027797 0.1130944 0.02323827 0.07510239
## 98     98 7.511941 0.3005554 6.028083 0.1163155 0.02348072 0.07699452
## 99     99 7.513392 0.3003125 6.028237 0.1168269 0.02334660 0.07779947
## 100   100 7.515310 0.3000093 6.029965 0.1157923 0.02370533 0.07486987
## 101   101 7.517200 0.2997267 6.032006 0.1146205 0.02350807 0.07227026
## 102   102 7.518523 0.2995250 6.034009 0.1135190 0.02324764 0.07042455
## 103   103 7.517254 0.2997201 6.032226 0.1113667 0.02309362 0.06870833
## 104   104 7.515245 0.3000877 6.032112 0.1116959 0.02298904 0.06845710
## 105   105 7.514488 0.3002056 6.032143 0.1102818 0.02299985 0.06923370
## 106   106 7.515067 0.3001279 6.033007 0.1099899 0.02293756 0.07029180
## 107   107 7.514177 0.3003133 6.033009 0.1109935 0.02279674 0.07224012
## 108   108 7.514110 0.3003185 6.032724 0.1114591 0.02293613 0.07192700
## 109   109 7.513273 0.3004550 6.032354 0.1106316 0.02278757 0.07034276
## 110   110 7.511385 0.3007876 6.029884 0.1110685 0.02257911 0.06984393
## 111   111 7.512485 0.3006073 6.030798 0.1111834 0.02292981 0.07004785
## 112   112 7.513823 0.3003851 6.032090 0.1093669 0.02274687 0.06820350
## 113   113 7.512365 0.3006401 6.029321 0.1087288 0.02267028 0.06706054
## 114   114 7.512264 0.3006559 6.028617 0.1084646 0.02251325 0.06774419
## 115   115 7.512725 0.3005805 6.027650 0.1083601 0.02292224 0.06639176
## 116   116 7.511498 0.3008008 6.027000 0.1112489 0.02290050 0.06868617
## 117   117 7.512270 0.3006845 6.028793 0.1100459 0.02267588 0.06752702
## 118   118 7.513411 0.3005039 6.029214 0.1098004 0.02267395 0.06762526
## 119   119 7.513604 0.3004762 6.028007 0.1066377 0.02237542 0.06540439
## 120   120 7.514593 0.3003166 6.028372 0.1042153 0.02200413 0.06420850
## 121   121 7.513811 0.3004596 6.027903 0.1025283 0.02191642 0.06192917
## 122   122 7.515258 0.3001949 6.029880 0.1014389 0.02170668 0.06040279
## 123   123 7.517332 0.2998595 6.031000 0.1034072 0.02210530 0.06063177
## 124   124 7.515873 0.3001221 6.030761 0.1009483 0.02166475 0.06067485
## 125   125 7.515610 0.3001808 6.029782 0.1006348 0.02161850 0.05981324
## 126   126 7.515092 0.3003109 6.029855 0.1026545 0.02154195 0.06063064
## 127   127 7.514734 0.3003767 6.029924 0.1015919 0.02133530 0.06094845
## 128   128 7.515041 0.3003212 6.029430 0.1019733 0.02143998 0.06161469
## 129   129 7.513366 0.3006122 6.027302 0.1017597 0.02129756 0.06132643
## 130   130 7.515584 0.3002402 6.029337 0.1013127 0.02134548 0.05896629
## 131   131 7.517034 0.3000146 6.029808 0.1022771 0.02137029 0.06021820
## 132   132 7.517722 0.2998931 6.030466 0.1017135 0.02143542 0.05905746
## 133   133 7.517219 0.2999718 6.029875 0.1005374 0.02119156 0.05828640
## 134   134 7.516902 0.3000405 6.029788 0.1012673 0.02132374 0.05973755
## 135   135 7.517124 0.3000102 6.029411 0.1013392 0.02109470 0.06041697
## 136   136 7.518035 0.2998729 6.031110 0.1013875 0.02102385 0.06057422
## 137   137 7.517644 0.2999498 6.030696 0.1020740 0.02097237 0.06183591
## 138   138 7.516270 0.3001766 6.029339 0.1029455 0.02087318 0.06193984
## 139   139 7.515536 0.3003313 6.028896 0.1034838 0.02073209 0.06282060
## 140   140 7.516923 0.3000842 6.030539 0.1036188 0.02066566 0.06372622
## 141   141 7.518062 0.2998865 6.031149 0.1025970 0.02053860 0.06440622
## 142   142 7.518806 0.2997621 6.032485 0.1021684 0.02040639 0.06409613
## 143   143 7.518464 0.2998224 6.032120 0.1026156 0.02039682 0.06518039
## 144   144 7.518539 0.2998196 6.031555 0.1028399 0.02074333 0.06432711
## 145   145 7.518937 0.2997566 6.031384 0.1012382 0.02052701 0.06247393
## 146   146 7.519216 0.2997125 6.031307 0.1019752 0.02055811 0.06312701
## 147   147 7.518230 0.2998780 6.029872 0.1033369 0.02063772 0.06328403
## 148   148 7.519361 0.2996896 6.031070 0.1035790 0.02065816 0.06423716
## 149   149 7.520784 0.2994414 6.032792 0.1031724 0.02045969 0.06319620
## 150   150 7.519248 0.2997089 6.031132 0.1031322 0.02040693 0.06354567
## 151   151 7.521098 0.2993892 6.032277 0.1037411 0.02041624 0.06446269
## 152   152 7.520753 0.2994599 6.032190 0.1042047 0.02033006 0.06536267
## 153   153 7.520357 0.2995502 6.031942 0.1049055 0.02030479 0.06626183
## 154   154 7.520715 0.2995027 6.031811 0.1061949 0.02028820 0.06696120
## 155   155 7.521349 0.2994028 6.032584 0.1061160 0.02031935 0.06610253
## 156   156 7.521916 0.2992910 6.033409 0.1071405 0.02036008 0.06710464
## 157   157 7.522612 0.2991837 6.033967 0.1073542 0.02050710 0.06709202
## 158   158 7.522700 0.2991697 6.033912 0.1080933 0.02077781 0.06795445
## 159   159 7.522539 0.2991862 6.033806 0.1087128 0.02087151 0.06764647
## 160   160 7.522835 0.2991363 6.033007 0.1077427 0.02068118 0.06719252
## 161   161 7.522430 0.2992031 6.032445 0.1083875 0.02070690 0.06671580
## 162   162 7.521755 0.2993230 6.032380 0.1078345 0.02059563 0.06668156
## 163   163 7.521093 0.2994595 6.031561 0.1088158 0.02054414 0.06740983
## 164   164 7.518880 0.2998390 6.030391 0.1091655 0.02047308 0.06811904
## 165   165 7.517715 0.3000240 6.030372 0.1088889 0.02053331 0.06809058
## 166   166 7.518001 0.2999859 6.030802 0.1084778 0.02050835 0.06793515
## 167   167 7.519552 0.2997066 6.032801 0.1073626 0.02062842 0.06714816
## 168   168 7.519574 0.2997032 6.033045 0.1071907 0.02063203 0.06669179
## 169   169 7.518869 0.2998312 6.032718 0.1080092 0.02070083 0.06769471
## 170   170 7.519362 0.2997504 6.033046 0.1071654 0.02043690 0.06729498
## 171   171 7.518966 0.2998156 6.032863 0.1069015 0.02037225 0.06632514
## 172   172 7.519354 0.2997416 6.033477 0.1066949 0.02036840 0.06608949
## 173   173 7.518632 0.2998615 6.032624 0.1065785 0.02034063 0.06613347
## 174   174 7.518506 0.2998816 6.032835 0.1060191 0.02040943 0.06594469
## 175   175 7.518397 0.2998959 6.032399 0.1065667 0.02032326 0.06577087
## 176   176 7.518452 0.2998750 6.032629 0.1065201 0.02012604 0.06675669
## 177   177 7.518141 0.2999341 6.033538 0.1060704 0.02000567 0.06711064
## 178   178 7.517356 0.3000801 6.033049 0.1066418 0.02012656 0.06711536
## 179   179 7.516744 0.3001918 6.032558 0.1063481 0.02009002 0.06705939
## 180   180 7.517322 0.3000978 6.033476 0.1063932 0.02006960 0.06663227
## 181   181 7.517367 0.3000828 6.033366 0.1061341 0.01991936 0.06640776
## 182   182 7.517600 0.3000466 6.033933 0.1056425 0.01996062 0.06635790
## 183   183 7.517268 0.3000974 6.033610 0.1055384 0.01979285 0.06668004
## 184   184 7.516774 0.3001848 6.033570 0.1062547 0.01986365 0.06743086
## 185   185 7.516689 0.3002131 6.033869 0.1054772 0.02005163 0.06577075
## 186   186 7.516927 0.3001668 6.033719 0.1058825 0.02000351 0.06605502
## 187   187 7.517304 0.3001067 6.034072 0.1057429 0.01987918 0.06598830
## 188   188 7.518114 0.2999695 6.034718 0.1049532 0.01974031 0.06547362
## 189   189 7.517413 0.3000919 6.034375 0.1050130 0.01971793 0.06537223
## 190   190 7.517130 0.3001269 6.034653 0.1044322 0.01969764 0.06496985
## 191   191 7.516484 0.3002316 6.034155 0.1042314 0.01971705 0.06426306
## 192   192 7.516329 0.3002585 6.034149 0.1042962 0.01971048 0.06399616
## 193   193 7.516271 0.3002687 6.034142 0.1038466 0.01971664 0.06381055
## 194   194 7.516278 0.3002744 6.033669 0.1042874 0.01972904 0.06437953
## 195   195 7.515962 0.3003362 6.032721 0.1042528 0.01983218 0.06438916
## 196   196 7.516237 0.3002949 6.033204 0.1047050 0.01990886 0.06406693
## 197   197 7.515725 0.3003818 6.032604 0.1041790 0.01979505 0.06353693
## 198   198 7.516321 0.3002861 6.032834 0.1044986 0.01975255 0.06399512
## 199   199 7.515594 0.3004068 6.032229 0.1044989 0.01976125 0.06389486
## 200   200 7.516076 0.3003225 6.032670 0.1044995 0.01980328 0.06327669
## 201   201 7.516390 0.3002663 6.032982 0.1041519 0.01972846 0.06378889
## 202   202 7.516036 0.3003287 6.032685 0.1037726 0.01982285 0.06308160
## 203   203 7.515731 0.3003807 6.032575 0.1040710 0.01975302 0.06362317
## 204   204 7.515598 0.3004055 6.032077 0.1037319 0.01971876 0.06337082
## 205   205 7.515233 0.3004672 6.031563 0.1038177 0.01976092 0.06353169
## 206   206 7.515352 0.3004440 6.031727 0.1043152 0.01973340 0.06379095
## 207   207 7.515428 0.3004288 6.031680 0.1047298 0.01972795 0.06435322
## 208   208 7.515083 0.3004870 6.031357 0.1044821 0.01969237 0.06448936
## 209   209 7.515555 0.3004112 6.031945 0.1038538 0.01960536 0.06439284
## 210   210 7.515583 0.3004160 6.032081 0.1034344 0.01962462 0.06400060
## 211   211 7.514964 0.3005245 6.031261 0.1033488 0.01962963 0.06386220
## 212   212 7.515225 0.3004823 6.031637 0.1030677 0.01970532 0.06369830
## 213   213 7.514648 0.3005807 6.030924 0.1026866 0.01968761 0.06341523
## 214   214 7.514046 0.3006847 6.030390 0.1026815 0.01971050 0.06334877
## 215   215 7.514162 0.3006612 6.030339 0.1027709 0.01970342 0.06317489
## 216   216 7.514219 0.3006512 6.030363 0.1028697 0.01966107 0.06327584
## 217   217 7.513932 0.3007048 6.030002 0.1030286 0.01965270 0.06324843
## 218   218 7.513932 0.3007022 6.029944 0.1027652 0.01964202 0.06297415
## 219   219 7.513788 0.3007318 6.029779 0.1026667 0.01964564 0.06278031
## 220   220 7.513925 0.3007077 6.029979 0.1024915 0.01960282 0.06292582
## 221   221 7.514146 0.3006688 6.030109 0.1026152 0.01965349 0.06257690
## 222   222 7.514234 0.3006541 6.029996 0.1025941 0.01961279 0.06247693
## 223   223 7.513945 0.3007056 6.029742 0.1026614 0.01961009 0.06258606
## 224   224 7.513930 0.3007118 6.029786 0.1026362 0.01964959 0.06254641
## 225   225 7.513770 0.3007401 6.029577 0.1024963 0.01962453 0.06260621
## 226   226 7.513661 0.3007630 6.029545 0.1026501 0.01968100 0.06263315
## 227   227 7.513658 0.3007620 6.029612 0.1024835 0.01965718 0.06261198
## 228   228 7.513582 0.3007738 6.029623 0.1024718 0.01966532 0.06256661
## 229   229 7.513521 0.3007838 6.029502 0.1020795 0.01965917 0.06235755
## 230   230 7.513227 0.3008357 6.029330 0.1021693 0.01964450 0.06254157
## 231   231 7.513332 0.3008164 6.029465 0.1021261 0.01962689 0.06255784
## 232   232 7.513532 0.3007804 6.029636 0.1020927 0.01962129 0.06254178
## 233   233 7.513489 0.3007880 6.029626 0.1020469 0.01962357 0.06249576
## 234   234 7.513532 0.3007808 6.029729 0.1020161 0.01962047 0.06254948
## 235   235 7.513564 0.3007764 6.029752 0.1019786 0.01963529 0.06253389
## 236   236 7.513592 0.3007697 6.029811 0.1019815 0.01962200 0.06254947
## 237   237 7.513596 0.3007691 6.029771 0.1019959 0.01961401 0.06255203
## 238   238 7.513563 0.3007753 6.029785 0.1018913 0.01961497 0.06243028
## 239   239 7.513563 0.3007754 6.029787 0.1019127 0.01962115 0.06241481
## 240   240 7.513584 0.3007718 6.029796 0.1019202 0.01962246 0.06241429
##    nvmax
## 25    25
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.733025e+01 -1.487732e-02  3.352477e+00  1.410604e-01  9.451043e-01 
##           x10           x11           x16           x17           x21 
##  4.253677e-01  5.771201e+07  2.555669e-01  4.181964e-01  3.566003e-02 
##         stat4        stat13        stat14        stat23        stat25 
## -1.636272e-01 -1.831055e-01 -3.085334e-01  1.948212e-01 -1.437340e-01 
##        stat38        stat41        stat60        stat85        stat98 
##  1.589337e-01 -1.702179e-01  1.948465e-01 -1.431581e-01  8.584054e-01 
##       stat110       stat128       stat144       stat146       stat149 
## -8.961450e-01 -1.604357e-01  1.596797e-01 -1.498132e-01 -2.043875e-01 
##      sqrt.x18 
##  7.454872e+00

Test

# Evaluate the caret backward-selection model on the held-out test set,
# drawing prediction limits on the diagnostic plot.
if (algo.backward.caret) {
  test.model(
    model.backward, data.test,
    method = 'leapBackward', subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.3   120.7   124.3   124.1   127.8   138.1 
## [1] "leapBackward  Test MSE: 91.2164226586688"

Stepwise Selection (w/ full train)

Train

# Stepwise selection (both directions) on the full training set.
# Searches between the null model and the full model; trace = 0 silences
# the step-by-step log.
if (algo.stepwise) {
  t1 = Sys.time()
  
  model.stepwise = step(model.null, scope=list(upper=model.full), data = data.train, direction="both", trace = 0)
  print(summary(model.stepwise))
  #saveRDS(model.stepwise,file = "model_stepwise.rds")
  
  t2 = Sys.time()
  # Report elapsed time with explicit units: pasting a bare `t2 - t1`
  # prints a difftime whose units switch automatically (secs vs mins),
  # which makes the logged number ambiguous.
  print(paste("Time taken for Stepwise Selection: ",
              format(difftime(t2, t1, units = "secs")), sep = ""))
  
  plot.diagnostics(model.stepwise, data.train)
}

Test

# Score the stepwise model (full train) against the test set.
if (algo.stepwise) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Stepwise selection (both directions) on the filtered training set
# (data.train2), using the corresponding null/full models.
if (algo.stepwise) {
  t1 = Sys.time()
  
  model.stepwise2 = step(model.null2, scope=list(upper=model.full2), data = data.train2, direction="both", trace = 0)
  print(summary(model.stepwise2))
  # Comment previously referenced model.forward (copy-paste leftover);
  # the object saved here is the filtered stepwise model.
  #saveRDS(model.stepwise2,file = "model_stepwise2.rds")
  
  t2 = Sys.time()
  # Explicit units avoid difftime's automatic secs/mins switching.
  print(paste("Time taken for Stepwise Selection: ",
              format(difftime(t2, t1, units = "secs")), sep = ""))
  
  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Score the filtered-train stepwise model against the (unfiltered) test set.
if (algo.stepwise) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Cross-validated stepwise (sequential) selection via caret's "leapSeq".
# NOTE: this reuses the name `model.stepwise`, replacing any model fitted
# by the non-CV stepwise chunk above.
if (algo.stepwise.caret) {
  set.seed(1)
  returned = train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  model.stepwise = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 19 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.205848 0.1070230 7.793298 0.4837937 0.02121144 0.2343155
## 2       2  9.978578 0.1456242 7.594919 0.4865059 0.01542500 0.2210630
## 3       3  9.869319 0.1645156 7.472217 0.4757917 0.01700325 0.2032220
## 4       4  9.705345 0.1918124 7.255723 0.4773872 0.01219753 0.1920870
## 5       5  9.623121 0.2056678 7.191041 0.5001426 0.01530975 0.2093101
## 6       6  9.615614 0.2067906 7.190418 0.4962592 0.01340840 0.1969631
## 7       7  9.599094 0.2094876 7.178668 0.4953228 0.01391436 0.2006617
## 8       8  9.581185 0.2124654 7.171670 0.4983385 0.01423833 0.2054035
## 9       9  9.573978 0.2136032 7.160477 0.4946051 0.01182847 0.1967355
## 10     10  9.566457 0.2148079 7.156153 0.4890367 0.01057451 0.1936818
## 11     11  9.567690 0.2147292 7.159764 0.4979287 0.01271238 0.2087616
## 12     12  9.572535 0.2139549 7.164093 0.4913304 0.01123655 0.2012837
## 13     13  9.571512 0.2141605 7.161750 0.4913776 0.01141375 0.1948090
## 14     14  9.667172 0.1966716 7.239365 0.3651035 0.04963529 0.2150276
## 15     15  9.568774 0.2146047 7.157827 0.4818420 0.01068948 0.1909961
## 16     16  9.565261 0.2152305 7.160380 0.4895184 0.01216199 0.1976081
## 17     17  9.567334 0.2149625 7.164737 0.4913768 0.01360563 0.2009011
## 18     18  9.572223 0.2142536 7.165375 0.4935820 0.01405113 0.2001792
## 19     19  9.562362 0.2158963 7.158834 0.4926740 0.01415188 0.2033848
## 20     20  9.754853 0.1831328 7.325915 0.6508972 0.07112079 0.4009606
## 21     21  9.667648 0.1990850 7.242520 0.7172097 0.05305699 0.4266902
## 22     22  9.571406 0.2145969 7.160732 0.4957266 0.01458413 0.2060193
## 23     23  9.660007 0.1996663 7.233095 0.6284673 0.05219118 0.3544183
## 24     24  9.583368 0.2127542 7.167614 0.4983340 0.01469583 0.2017528
## 25     25  9.677466 0.1954907 7.242970 0.3686926 0.04851463 0.2016169
## 26     26  9.583743 0.2127369 7.164793 0.4979161 0.01452431 0.1991570
## 27     27  9.587283 0.2122448 7.169803 0.4930287 0.01371925 0.1929269
## 28     28  9.665424 0.1988109 7.255504 0.5428599 0.04478033 0.3168007
## 29     29  9.673551 0.1975585 7.255762 0.5365232 0.04296156 0.3206129
## 30     30  9.661499 0.1992733 7.243027 0.4643831 0.03493555 0.2650344
## 31     31  9.761705 0.1833666 7.339327 0.7342722 0.06364207 0.4725557
## 32     32  9.595537 0.2109734 7.183650 0.4915331 0.01370418 0.1941739
## 33     33  9.668088 0.1982384 7.247809 0.4559111 0.03433277 0.2554943
## 34     34  9.692269 0.1943600 7.260484 0.5478561 0.05158621 0.2957152
## 35     35  9.691701 0.1946775 7.266045 0.5572802 0.05041756 0.2596346
## 36     36  9.710728 0.1926632 7.275616 0.6944079 0.04993792 0.4066353
## 37     37  9.890349 0.1622596 7.439056 0.8238863 0.07486707 0.5032664
## 38     38  9.623849 0.2066853 7.201725 0.4821647 0.01334456 0.1844175
## 39     39  9.697088 0.1939183 7.265840 0.5488092 0.04829880 0.2472763
## 40     40  9.806936 0.1765717 7.360640 0.7799452 0.06637559 0.4824487
## 41     41  9.890229 0.1612827 7.444983 0.6939056 0.07524574 0.3823510
## 42     42  9.791108 0.1783603 7.361201 0.6371773 0.05908595 0.4041515
## 43     43  9.630017 0.2057897 7.208405 0.4964906 0.01402942 0.1992664
## 44     44  9.632903 0.2054327 7.213008 0.4954107 0.01434914 0.1968310
## 45     45  9.789790 0.1771470 7.346855 0.3101937 0.05186585 0.2363741
## 46     46  9.698911 0.1938891 7.287534 0.5436979 0.04441081 0.3185793
## 47     47  9.714252 0.1917306 7.284569 0.6234033 0.05090673 0.3391275
## 48     48  9.796262 0.1778664 7.365890 0.6931181 0.06419081 0.3644622
## 49     49  9.767536 0.1816998 7.338819 0.5298542 0.05387138 0.2881042
## 50     50  9.637942 0.2048147 7.226843 0.4913799 0.01284409 0.1944797
## 51     51  9.866829 0.1639003 7.421309 0.3693511 0.06253153 0.2425638
## 52     52  9.642579 0.2040996 7.229097 0.4891556 0.01254095 0.1929687
## 53     53  9.647751 0.2033738 7.233073 0.4870160 0.01192579 0.1912024
## 54     54  9.841854 0.1690102 7.435294 0.5220489 0.05574156 0.3790380
## 55     55  9.890808 0.1624887 7.459213 0.7518592 0.07222948 0.4697268
## 56     56  9.704218 0.1931139 7.290025 0.4706846 0.03202974 0.2534636
## 57     57  9.828787 0.1734083 7.383273 0.7311806 0.06518489 0.4472805
## 58     58  9.651134 0.2030893 7.236450 0.4907549 0.01232460 0.1966152
## 59     59  9.775628 0.1807208 7.364195 0.4938917 0.04466065 0.3269575
## 60     60  9.654436 0.2025888 7.239809 0.4903490 0.01308315 0.1950564
## 61     61  9.800132 0.1771332 7.369460 0.5606545 0.05519135 0.3325896
## 62     62  9.744382 0.1869204 7.316264 0.5297419 0.05234565 0.2862918
## 63     63  9.655512 0.2025883 7.240903 0.4862989 0.01244579 0.1916203
## 64     64  9.654732 0.2027191 7.238955 0.4870720 0.01212953 0.1913291
## 65     65  9.653792 0.2029008 7.240310 0.4892313 0.01245258 0.1930654
## 66     66  9.658758 0.2021530 7.245213 0.4881657 0.01252597 0.1917628
## 67     67  9.658104 0.2022354 7.245916 0.4866751 0.01277515 0.1906355
## 68     68  9.877433 0.1645466 7.451598 0.6966835 0.06504695 0.4641528
## 69     69  9.742879 0.1879545 7.313709 0.6190560 0.05112852 0.3344059
## 70     70  9.657104 0.2024695 7.243922 0.4869482 0.01329759 0.1931608
## 71     71  9.884169 0.1635219 7.449931 0.6887683 0.06623845 0.3837369
## 72     72  9.821163 0.1744610 7.386343 0.6494693 0.06401674 0.3985422
## 73     73  9.813806 0.1745117 7.382073 0.3824512 0.05442309 0.2737273
## 74     74  9.813123 0.1756336 7.368114 0.6595856 0.06529523 0.3603530
## 75     75  9.715944 0.1915015 7.299077 0.4584832 0.03148521 0.2415375
## 76     76  9.658705 0.2023136 7.240721 0.4829924 0.01263009 0.1906733
## 77     77  9.816682 0.1745982 7.378859 0.5700113 0.06508182 0.3083729
## 78     78  9.753947 0.1871649 7.323583 0.7050405 0.05219045 0.4103875
## 79     79  9.728757 0.1899913 7.303279 0.5450891 0.04613288 0.2410723
## 80     80  9.657542 0.2025750 7.245793 0.4857156 0.01273750 0.1928025
## 81     81  9.875736 0.1643392 7.430484 0.5975809 0.06783743 0.3514538
## 82     82  9.661027 0.2021217 7.246509 0.4824492 0.01231761 0.1908359
## 83     83  9.747245 0.1876104 7.317907 0.6211392 0.05231599 0.3389481
## 84     84  9.750423 0.1865795 7.327886 0.5199529 0.05064731 0.2848899
## 85     85  9.662660 0.2019061 7.249560 0.4788297 0.01199038 0.1905972
## 86     86  9.755824 0.1857785 7.328905 0.5204940 0.05021701 0.2858548
## 87     87  9.825018 0.1744072 7.383505 0.6758154 0.06097878 0.3358698
## 88     88  9.836495 0.1720800 7.393056 0.6312501 0.06677987 0.3781959
## 89     89  9.759581 0.1861893 7.332083 0.6453826 0.04794514 0.3308907
## 90     90 10.052085 0.1348123 7.589378 0.7572040 0.07491613 0.5068181
## 91     91  9.742232 0.1883245 7.325446 0.5370037 0.04572747 0.3034232
## 92     92  9.745429 0.1878826 7.325927 0.5356700 0.04555989 0.3039753
## 93     93  9.807475 0.1759672 7.373610 0.5101272 0.05470393 0.3053785
## 94     94  9.726723 0.1899786 7.309848 0.4556845 0.03492317 0.2468652
## 95     95  9.825810 0.1744529 7.397533 0.6588187 0.05686335 0.3788034
## 96     96  9.991005 0.1462823 7.538621 0.7693476 0.07852728 0.4470752
## 97     97  9.747529 0.1874954 7.314033 0.5454636 0.04863922 0.2800521
## 98     98  9.669063 0.2012139 7.253637 0.4850862 0.01340729 0.1900389
## 99     99  9.746222 0.1878342 7.329376 0.5331434 0.04547075 0.3045927
## 100   100  9.859667 0.1679995 7.408045 0.3830575 0.06404970 0.2594193
## 101   101  9.756963 0.1864437 7.327139 0.6171725 0.05169535 0.3399277
## 102   102  9.866159 0.1683681 7.419355 0.5995406 0.06374656 0.3905077
## 103   103  9.756182 0.1863035 7.323709 0.5397656 0.04809870 0.2747757
## 104   104  9.756905 0.1862035 7.327747 0.5367317 0.04806866 0.2732414
## 105   105  9.761557 0.1858510 7.334476 0.6138594 0.05195081 0.3349864
## 106   106  9.775841 0.1845493 7.351114 0.6989651 0.05141410 0.4106269
## 107   107  9.679809 0.1997502 7.267562 0.4782268 0.01318868 0.1801019
## 108   108  9.851841 0.1708671 7.428944 0.6709641 0.06122359 0.3837901
## 109   109  9.929924 0.1562708 7.466421 0.5409825 0.07368042 0.3253191
## 110   110  9.764762 0.1851014 7.334885 0.5363624 0.04762464 0.2666889
## 111   111  9.970696 0.1515506 7.512362 0.7100552 0.07166214 0.4430499
## 112   112  9.769186 0.1848721 7.337602 0.6155445 0.05245223 0.3339394
## 113   113  9.685401 0.1990069 7.269797 0.4747922 0.01218572 0.1776034
## 114   114  9.787946 0.1823924 7.360639 0.6419216 0.04783109 0.3208396
## 115   115  9.766309 0.1849324 7.335572 0.5310433 0.04752834 0.2671718
## 116   116  9.861110 0.1687604 7.413185 0.5586921 0.06530423 0.3351105
## 117   117  9.784356 0.1834331 7.359182 0.6954412 0.05115998 0.4159221
## 118   118  9.849787 0.1717067 7.425200 0.7062931 0.05861239 0.4513249
## 119   119  9.859216 0.1685619 7.419631 0.3957168 0.06058269 0.2794490
## 120   120  9.690980 0.1982116 7.275868 0.4768088 0.01293721 0.1818628
## 121   121  9.767241 0.1857791 7.344439 0.5962003 0.03684435 0.2809068
## 122   122  9.761526 0.1863279 7.328946 0.5791071 0.04462824 0.2979247
## 123   123  9.694173 0.1977654 7.276088 0.4678762 0.01249599 0.1766908
## 124   124  9.814544 0.1782304 7.404891 0.6571133 0.04592424 0.4169079
## 125   125  9.847651 0.1721860 7.413361 0.6197523 0.05146462 0.3203784
## 126   126  9.839247 0.1736511 7.413600 0.6272760 0.05186607 0.3082046
## 127   127  9.860500 0.1702002 7.423877 0.5635256 0.05473517 0.3761397
## 128   128  9.760794 0.1864238 7.341018 0.5297653 0.04316311 0.2495953
## 129   129  9.796126 0.1801342 7.391755 0.5137713 0.04135545 0.3114097
## 130   130  9.699521 0.1970318 7.281778 0.4743573 0.01347134 0.1858399
## 131   131  9.778457 0.1842348 7.354999 0.5936856 0.03688326 0.2851011
## 132   132  9.766839 0.1852907 7.330135 0.3629290 0.03332878 0.1500760
## 133   133  9.741611 0.1895484 7.320819 0.5036780 0.03372860 0.2136497
## 134   134  9.784364 0.1827947 7.365090 0.5599019 0.02983182 0.2608973
## 135   135  9.827382 0.1763932 7.398617 0.6476484 0.04838620 0.3886404
## 136   136  9.765519 0.1854801 7.328292 0.3633627 0.03385954 0.1496003
## 137   137  9.796429 0.1814719 7.377846 0.6352224 0.04078433 0.3737992
## 138   138  9.696567 0.1975065 7.277658 0.4713338 0.01298252 0.1799341
## 139   139  9.728682 0.1910649 7.326020 0.4469149 0.01952404 0.2101222
## 140   140  9.696939 0.1974802 7.275982 0.4713972 0.01280562 0.1814833
## 141   141  9.807709 0.1789570 7.398235 0.6213568 0.04167252 0.3728498
## 142   142  9.778063 0.1824574 7.373620 0.4788266 0.03602759 0.2291808
## 143   143  9.766119 0.1854037 7.329698 0.3633686 0.03431011 0.1543842
## 144   144  9.831968 0.1741302 7.382998 0.3761801 0.04735865 0.2165069
## 145   145  9.760066 0.1872743 7.337412 0.5626570 0.02892796 0.2467934
## 146   146  9.833161 0.1739647 7.385352 0.3754350 0.04726298 0.2159764
## 147   147  9.697506 0.1974042 7.280452 0.4738620 0.01315596 0.1829651
## 148   148  9.934149 0.1582350 7.491004 0.7184225 0.06175763 0.4238277
## 149   149  9.698719 0.1972338 7.280051 0.4754688 0.01315388 0.1836875
## 150   150  9.699335 0.1971567 7.280477 0.4742080 0.01306974 0.1812704
## 151   151  9.765558 0.1858045 7.334896 0.4941589 0.03907042 0.2477996
## 152   152  9.764956 0.1865759 7.339577 0.5631708 0.02888641 0.2447091
## 153   153  9.766799 0.1857046 7.336009 0.5069329 0.03739074 0.2342680
## 154   154  9.759247 0.1870975 7.337645 0.5023504 0.03502383 0.2476759
## 155   155  9.703651 0.1965721 7.284149 0.4740852 0.01306864 0.1814425
## 156   156  9.768743 0.1860513 7.344447 0.5669928 0.02928693 0.2499670
## 157   157  9.864808 0.1683860 7.420514 0.3688602 0.04660355 0.2287535
## 158   158  9.705838 0.1962884 7.287443 0.4765856 0.01304326 0.1847617
## 159   159  9.958070 0.1533992 7.497965 0.4789394 0.05236824 0.2404793
## 160   160  9.823952 0.1766288 7.395495 0.5907209 0.04159159 0.2651377
## 161   161  9.739994 0.1894800 7.337710 0.4525122 0.02016262 0.2124322
## 162   162  9.769925 0.1858826 7.346586 0.5674685 0.02918956 0.2509198
## 163   163  9.851152 0.1710184 7.424737 0.5304903 0.04765679 0.2636172
## 164   164  9.705675 0.1962615 7.288091 0.4750686 0.01285978 0.1824472
## 165   165  9.843073 0.1726173 7.393228 0.3783518 0.04738197 0.2168783
## 166   166  9.706242 0.1962186 7.289184 0.4746018 0.01268388 0.1807184
## 167   167  9.859935 0.1702429 7.439882 0.5770360 0.04063988 0.2988954
## 168   168  9.707296 0.1960547 7.290261 0.4731732 0.01278193 0.1798009
## 169   169  9.797973 0.1797001 7.394705 0.4752098 0.03632498 0.2588401
## 170   170  9.801850 0.1800602 7.385451 0.5618091 0.03069719 0.2678155
## 171   171  9.708704 0.1959023 7.291018 0.4738181 0.01301194 0.1806541
## 172   172  9.777277 0.1843583 7.348222 0.4947615 0.03963994 0.2493232
## 173   173  9.948396 0.1541421 7.505768 0.3469153 0.05176332 0.2296520
## 174   174  9.708264 0.1959788 7.293230 0.4725473 0.01267767 0.1803379
## 175   175  9.799954 0.1804210 7.386034 0.5560915 0.02951421 0.2630787
## 176   176  9.710007 0.1957396 7.294215 0.4718303 0.01251621 0.1797027
## 177   177  9.709761 0.1957814 7.293598 0.4721962 0.01249503 0.1800756
## 178   178  9.709899 0.1957841 7.294202 0.4724907 0.01243319 0.1799258
## 179   179  9.710154 0.1957548 7.294527 0.4718398 0.01235600 0.1799241
## 180   180  9.901042 0.1635531 7.452728 0.4791869 0.04654934 0.2355561
## 181   181  9.711441 0.1955796 7.295954 0.4727161 0.01250022 0.1808782
## 182   182  9.781191 0.1838685 7.353476 0.4951686 0.03979747 0.2515778
## 183   183  9.777688 0.1840650 7.346410 0.3700630 0.03380819 0.1615531
## 184   184  9.766282 0.1862794 7.345501 0.5071169 0.03526456 0.2143516
## 185   185  9.780800 0.1839241 7.352517 0.4936978 0.03959702 0.2503143
## 186   186  9.752263 0.1877879 7.347666 0.4452894 0.02246241 0.2136088
## 187   187  9.847164 0.1724129 7.403973 0.3851670 0.04779406 0.2233413
## 188   188  9.770739 0.1855674 7.353068 0.4994980 0.03537189 0.2517242
## 189   189  9.777217 0.1844820 7.350929 0.5064168 0.03807318 0.2397773
## 190   190  9.767873 0.1860549 7.348075 0.5071745 0.03633485 0.2160583
## 191   191  9.777901 0.1843631 7.351659 0.5053529 0.03813373 0.2386879
## 192   192  9.710990 0.1955954 7.296361 0.4679568 0.01223436 0.1773079
## 193   193  9.710467 0.1956765 7.295535 0.4681224 0.01227607 0.1769849
## 194   194  9.710846 0.1956195 7.296352 0.4672094 0.01214289 0.1765839
## 195   195  9.903584 0.1630927 7.455669 0.4313776 0.05401281 0.2223542
## 196   196  9.747137 0.1889412 7.338040 0.4740632 0.02278413 0.2254485
## 197   197  9.747994 0.1887816 7.339549 0.4752755 0.02340342 0.2289336
## 198   198  9.748421 0.1887136 7.339830 0.4750986 0.02342800 0.2295458
## 199   199  9.711763 0.1954646 7.297599 0.4679643 0.01212524 0.1783861
## 200   200  9.712089 0.1954114 7.297899 0.4680368 0.01209371 0.1783917
## 201   201  9.711954 0.1954228 7.297856 0.4677251 0.01197920 0.1782884
## 202   202  9.843662 0.1733640 7.418321 0.5139677 0.04996301 0.3066963
## 203   203  9.776473 0.1849888 7.354496 0.5579042 0.02813249 0.2397958
## 204   204  9.712793 0.1952966 7.298579 0.4673249 0.01183193 0.1783530
## 205   205  9.846947 0.1729410 7.421109 0.5156754 0.05053357 0.3114863
## 206   206  9.768816 0.1858713 7.349420 0.5049857 0.03545023 0.2154665
## 207   207  9.712665 0.1953247 7.297094 0.4677898 0.01196921 0.1790613
## 208   208  9.777256 0.1848838 7.353368 0.5595925 0.02841093 0.2416870
## 209   209  9.758064 0.1868521 7.352071 0.4413222 0.02422457 0.2187009
## 210   210  9.830309 0.1750146 7.413086 0.4790664 0.04222900 0.2667830
## 211   211  9.713046 0.1952798 7.297714 0.4679258 0.01200062 0.1788622
## 212   212  9.713661 0.1951954 7.297866 0.4674915 0.01194017 0.1790801
## 213   213  9.772635 0.1853827 7.351436 0.5073761 0.03641022 0.2174050
## 214   214  9.713905 0.1951595 7.298138 0.4673039 0.01203417 0.1786011
## 215   215  9.785473 0.1834355 7.357869 0.5101646 0.03924981 0.2494564
## 216   216  9.714256 0.1951055 7.298302 0.4678542 0.01204842 0.1789006
## 217   217  9.786017 0.1829770 7.353938 0.3623824 0.03592279 0.1665485
## 218   218  9.714872 0.1950193 7.298600 0.4680072 0.01207305 0.1790553
## 219   219  9.715020 0.1949969 7.298801 0.4680128 0.01202056 0.1790669
## 220   220  9.715131 0.1949851 7.298787 0.4680137 0.01203149 0.1789404
## 221   221  9.751291 0.1883055 7.340550 0.4755239 0.02334139 0.2300752
## 222   222  9.715141 0.1949809 7.298526 0.4681280 0.01203892 0.1789291
## 223   223  9.715082 0.1949882 7.298287 0.4682542 0.01201889 0.1791110
## 224   224  9.751913 0.1881897 7.341525 0.4761998 0.02368617 0.2322449
## 225   225  9.778261 0.1845123 7.360318 0.5045028 0.03709413 0.2672089
## 226   226  9.777234 0.1850066 7.344146 0.5667707 0.03986442 0.2767922
## 227   227  9.788607 0.1830032 7.362579 0.4952423 0.04049777 0.2655269
## 228   228  9.789297 0.1837771 7.367967 0.6385308 0.04041038 0.3686853
## 229   229  9.775405 0.1852848 7.342174 0.5631779 0.03905983 0.2721519
## 230   230  9.790933 0.1824535 7.358134 0.3592697 0.03733926 0.1729544
## 231   231  9.777436 0.1846706 7.360030 0.5037751 0.03675608 0.2670504
## 232   232  9.714408 0.1950926 7.297743 0.4683593 0.01207440 0.1787876
## 233   233  9.786948 0.1833492 7.358447 0.5125386 0.03943818 0.2523509
## 234   234  9.851246 0.1727760 7.421434 0.5370260 0.05021810 0.3089969
## 235   235  9.985527 0.1508592 7.541721 0.5522606 0.06238269 0.3344816
## 236   236  9.769749 0.1858255 7.350410 0.5048567 0.03510150 0.2175733
## 237   237  9.776497 0.1851027 7.352050 0.5546727 0.02687995 0.2355181
## 238   238  9.790625 0.1825208 7.358714 0.3594360 0.03725063 0.1740700
## 239   239  9.957327 0.1547972 7.521318 0.5365630 0.05751046 0.3178205
## 240   240  9.714503 0.1950768 7.297804 0.4682653 0.01206096 0.1784811
##    nvmax
## 19    19
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.941836e+01 -1.276249e-02  3.235631e+00  1.285802e-01  9.650356e-01 
##           x10           x11           x14           x16           x17 
##  3.156375e-01  5.382116e+07 -2.523649e-01  2.760891e-01  4.401452e-01 
##           x21        stat13        stat14        stat24        stat60 
##  3.914063e-02 -1.817820e-01 -2.758714e-01 -1.746312e-01  1.952430e-01 
##        stat98       stat110       stat144       stat149      sqrt.x18 
##  9.490836e-01 -9.372953e-01  1.657180e-01 -2.076103e-01  7.619056e+00

Test

# Evaluate the caret stepwise-selection model on the held-out test set.
if (algo.stepwise.caret) {
  # Older call signature kept for reference:
  # test.model(model.stepwise, data.test, "Stepwise Selection", draw.limits = TRUE, regsubset = TRUE, id = id, formula = formula)
  test.model(
    model.stepwise, data.test,
    method = 'leapSeq', subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.1   121.9   125.5   125.3   129.0   140.3 
## [1] "leapSeq  Test MSE: 90.3031382273639"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

# LASSO on the full training set via glmnet, with the penalty chosen by
# cross-validation.
if (algo.LASSO) {
  # glmnet needs a numeric predictor matrix and a response vector.
  # (model.matrix() would also work: it builds a design matrix, expanding
  # factors into dummy columns and interactions accordingly.)
  x = as.matrix(data.train[, feature.names])
  y = data.train[, label.names]
  
  xtest = as.matrix(data.test[, feature.names])
  ytest = data.test[, label.names]
  
  # Lambda grid spanning 10^10 down to 10^-2.
  grid = 10^seq(10, -2, length = 100)
  
  set.seed(1)
  model.LASSO = glmnet(x, y, alpha = 1, lambda = grid)
  
  # alpha = 1 selects the LASSO penalty; lambda.min is the CV optimum
  # (it can also be read off the plot).
  cv.out = cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda = cv.out$lambda.min
  
  print(coef(model.LASSO, s = bestlambda))
}

Test

# Evaluate the LASSO model (full train) on the test set.
if (algo.LASSO) {
  lasso.pred = predict(model.LASSO, s = bestlambda, newx = xtest)
  
  # Mean squared prediction error on the test set.
  testMSE_LASSO = mean((ytest - lasso.pred)^2)
  # Label corrected: the quantity computed above is the MSE, not the RMSE
  # (consistent with testMSE_LASSO and the other "Test MSE" outputs).
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))
  
  # Predicted vs. observed scatter.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

# LASSO on the filtered training set (data.train2) via glmnet, with the
# penalty chosen by cross-validation. Test data stays unfiltered.
if (algo.LASSO) {
  # glmnet needs a numeric predictor matrix and a response vector.
  # (model.matrix() would also work: it builds a design matrix, expanding
  # factors into dummy columns and interactions accordingly.)
  x = as.matrix(data.train2[, feature.names])
  y = data.train2[, label.names]
  
  xtest = as.matrix(data.test[, feature.names])
  ytest = data.test[, label.names]
  
  # Lambda grid spanning 10^10 down to 10^-2.
  grid = 10^seq(10, -2, length = 100)
  
  set.seed(1)
  model.LASSO = glmnet(x, y, alpha = 1, lambda = grid)
  
  # alpha = 1 selects the LASSO penalty; lambda.min is the CV optimum
  # (it can also be read off the plot).
  cv.out = cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda = cv.out$lambda.min
  
  print(coef(model.LASSO, s = bestlambda))
}

Test

# Score the filtered-train LASSO fit on the test set at the CV-chosen
# penalty, report the test error, and plot predicted vs. actual.
if (algo.LASSO == TRUE) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Test mean squared error (no square root is taken).
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the printed label previously said "RMSE" although the value
  # is an MSE; relabel to "MSE" for consistency with every other report
  # in this document ("... Test MSE: ...").
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

# Tune a LASSO (caret method "glmnet", alpha held at 1 via the 'LASSO'
# sub-option) with cross-validation on the FULL training set, and keep
# the winning model for the Test chunk below.
if (algo.LASSO.caret) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.148 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  9.689801  0.1979179  7.277215
##   0.01047616  9.688690  0.1980487  7.276272
##   0.01097499  9.687532  0.1981853  7.275287
##   0.01149757  9.686324  0.1983284  7.274247
##   0.01204504  9.685068  0.1984773  7.273167
##   0.01261857  9.683763  0.1986324  7.272054
##   0.01321941  9.682404  0.1987946  7.270905
##   0.01384886  9.680973  0.1989665  7.269696
##   0.01450829  9.679481  0.1991464  7.268431
##   0.01519911  9.677929  0.1993340  7.267118
##   0.01592283  9.676311  0.1995302  7.265759
##   0.01668101  9.674625  0.1997356  7.264342
##   0.01747528  9.672881  0.1999487  7.262873
##   0.01830738  9.671097  0.2001668  7.261366
##   0.01917910  9.669260  0.2003918  7.259797
##   0.02009233  9.667395  0.2006205  7.258188
##   0.02104904  9.665479  0.2008560  7.256524
##   0.02205131  9.663527  0.2010962  7.254819
##   0.02310130  9.661512  0.2013453  7.253039
##   0.02420128  9.659419  0.2016057  7.251175
##   0.02535364  9.657257  0.2018758  7.249250
##   0.02656088  9.655027  0.2021555  7.247263
##   0.02782559  9.652733  0.2024446  7.245235
##   0.02915053  9.650369  0.2027437  7.243159
##   0.03053856  9.647938  0.2030530  7.241061
##   0.03199267  9.645423  0.2033751  7.238941
##   0.03351603  9.642841  0.2037081  7.236781
##   0.03511192  9.640180  0.2040543  7.234569
##   0.03678380  9.637461  0.2044105  7.232338
##   0.03853529  9.634718  0.2047713  7.230168
##   0.04037017  9.631930  0.2051405  7.227992
##   0.04229243  9.629089  0.2055201  7.225762
##   0.04430621  9.626185  0.2059113  7.223498
##   0.04641589  9.623222  0.2063132  7.221213
##   0.04862602  9.620207  0.2067258  7.218868
##   0.05094138  9.617179  0.2071424  7.216601
##   0.05336699  9.614127  0.2075661  7.214336
##   0.05590810  9.611100  0.2079893  7.212172
##   0.05857021  9.608053  0.2084198  7.209995
##   0.06135907  9.604880  0.2088756  7.207749
##   0.06428073  9.601685  0.2093400  7.205441
##   0.06734151  9.598465  0.2098149  7.203179
##   0.07054802  9.595229  0.2102988  7.200873
##   0.07390722  9.591999  0.2107874  7.198442
##   0.07742637  9.588777  0.2112825  7.195949
##   0.08111308  9.585572  0.2117836  7.193380
##   0.08497534  9.582468  0.2122774  7.190859
##   0.08902151  9.579651  0.2127333  7.188483
##   0.09326033  9.576983  0.2131740  7.186204
##   0.09770100  9.574659  0.2135679  7.184236
##   0.10235310  9.572521  0.2139414  7.182450
##   0.10722672  9.570591  0.2142918  7.180810
##   0.11233240  9.568850  0.2146221  7.179397
##   0.11768120  9.567373  0.2149221  7.178367
##   0.12328467  9.566161  0.2151899  7.177624
##   0.12915497  9.565263  0.2154159  7.177096
##   0.13530478  9.564617  0.2156114  7.176793
##   0.14174742  9.564234  0.2157733  7.176687
##   0.14849683  9.564164  0.2158947  7.176774
##   0.15556761  9.564254  0.2160024  7.177079
##   0.16297508  9.564666  0.2160684  7.177707
##   0.17073526  9.565001  0.2161619  7.178535
##   0.17886495  9.565678  0.2162101  7.179783
##   0.18738174  9.566552  0.2162400  7.181423
##   0.19630407  9.567758  0.2162267  7.183404
##   0.20565123  9.569308  0.2161706  7.185855
##   0.21544347  9.571319  0.2160479  7.188739
##   0.22570197  9.573763  0.2158607  7.192074
##   0.23644894  9.576735  0.2155967  7.195930
##   0.24770764  9.580172  0.2152732  7.200229
##   0.25950242  9.584151  0.2148715  7.204952
##   0.27185882  9.588584  0.2144086  7.210283
##   0.28480359  9.593489  0.2138813  7.216049
##   0.29836472  9.598379  0.2133813  7.221806
##   0.31257158  9.603711  0.2128233  7.227954
##   0.32745492  9.609260  0.2122460  7.234263
##   0.34304693  9.615220  0.2116189  7.241105
##   0.35938137  9.621323  0.2109923  7.248157
##   0.37649358  9.627920  0.2103045  7.255659
##   0.39442061  9.634681  0.2096267  7.263357
##   0.41320124  9.642017  0.2088787  7.271615
##   0.43287613  9.649754  0.2081042  7.280190
##   0.45348785  9.658122  0.2072504  7.289317
##   0.47508102  9.666658  0.2064145  7.298674
##   0.49770236  9.675846  0.2055000  7.308639
##   0.52140083  9.685601  0.2045258  7.319131
##   0.54622772  9.696131  0.2034517  7.330297
##   0.57223677  9.706070  0.2025676  7.341064
##   0.59948425  9.716265  0.2017098  7.352028
##   0.62802914  9.725649  0.2011256  7.362719
##   0.65793322  9.735695  0.2005093  7.374050
##   0.68926121  9.746603  0.1998366  7.385976
##   0.72208090  9.758551  0.1990649  7.398880
##   0.75646333  9.771653  0.1981733  7.412773
##   0.79248290  9.786011  0.1971399  7.427560
##   0.83021757  9.801746  0.1959383  7.443450
##   0.86974900  9.818985  0.1945363  7.460476
##   0.91116276  9.837870  0.1928948  7.478637
##   0.95454846  9.858554  0.1909658  7.498074
##   1.00000000  9.881205  0.1886905  7.519044
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.1484968.

##    alpha    lambda
## 59     1 0.1484968
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Test

# Evaluate the CV-tuned LASSO (full-train fit) on the held-out test set;
# test.model() prints a prediction summary and the test MSE.
if (algo.LASSO.caret) {
  test.model(
    model.LASSO.caret,
    data.test,
    method = "glmnet",
    subopt = "LASSO",
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   111.0   122.2   125.5   125.3   128.8   138.3 
## [1] "glmnet LASSO Test MSE: 89.9613396773673"

LASSO with CV (w/ filtered train)

Train

# Same CV-tuned LASSO as above, but trained on the FILTERED training set
# (data.train2).  Overwrites model.LASSO.caret for the next Test chunk.
if (algo.LASSO.caret) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.112 on full training set
## glmnet 
## 
## 5712 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5140, 5140, 5141, 5142, 5141, 5142, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  7.495376  0.3032179  6.017033
##   0.01047616  7.494598  0.3033247  6.016488
##   0.01097499  7.493795  0.3034350  6.015936
##   0.01149757  7.492964  0.3035495  6.015360
##   0.01204504  7.492104  0.3036682  6.014755
##   0.01261857  7.491208  0.3037922  6.014112
##   0.01321941  7.490276  0.3039217  6.013439
##   0.01384886  7.489306  0.3040570  6.012735
##   0.01450829  7.488301  0.3041975  6.012002
##   0.01519911  7.487263  0.3043431  6.011262
##   0.01592283  7.486188  0.3044947  6.010511
##   0.01668101  7.485054  0.3046556  6.009712
##   0.01747528  7.483891  0.3048213  6.008917
##   0.01830738  7.482693  0.3049926  6.008105
##   0.01917910  7.481461  0.3051693  6.007268
##   0.02009233  7.480186  0.3053539  6.006410
##   0.02104904  7.478859  0.3055472  6.005507
##   0.02205131  7.477471  0.3057508  6.004560
##   0.02310130  7.476042  0.3059616  6.003582
##   0.02420128  7.474531  0.3061871  6.002531
##   0.02535364  7.472977  0.3064208  6.001445
##   0.02656088  7.471360  0.3066663  6.000290
##   0.02782559  7.469690  0.3069219  5.999090
##   0.02915053  7.467942  0.3071924  5.997820
##   0.03053856  7.466157  0.3074705  5.996494
##   0.03199267  7.464302  0.3077620  5.995118
##   0.03351603  7.462417  0.3080603  5.993743
##   0.03511192  7.460495  0.3083668  5.992344
##   0.03678380  7.458542  0.3086803  5.990896
##   0.03853529  7.456555  0.3090013  5.989427
##   0.04037017  7.454565  0.3093248  5.987968
##   0.04229243  7.452512  0.3096621  5.986468
##   0.04430621  7.450447  0.3100048  5.984988
##   0.04641589  7.448365  0.3103543  5.983535
##   0.04862602  7.446270  0.3107103  5.982100
##   0.05094138  7.444212  0.3110643  5.980757
##   0.05336699  7.442193  0.3114162  5.979510
##   0.05590810  7.440259  0.3117583  5.978357
##   0.05857021  7.438403  0.3120921  5.977263
##   0.06135907  7.436507  0.3124397  5.976128
##   0.06428073  7.434660  0.3127851  5.975117
##   0.06734151  7.432771  0.3131461  5.974056
##   0.07054802  7.431003  0.3134916  5.973001
##   0.07390722  7.429349  0.3138232  5.971926
##   0.07742637  7.427856  0.3141319  5.971009
##   0.08111308  7.426398  0.3144414  5.970143
##   0.08497534  7.425132  0.3147249  5.969384
##   0.08902151  7.424052  0.3149840  5.968749
##   0.09326033  7.423124  0.3152255  5.968160
##   0.09770100  7.422388  0.3154421  5.967650
##   0.10235310  7.421822  0.3156389  5.967285
##   0.10722672  7.421367  0.3158267  5.967133
##   0.11233240  7.421153  0.3159815  5.967016
##   0.11768120  7.421166  0.3161059  5.967026
##   0.12328467  7.421577  0.3161671  5.967393
##   0.12915497  7.422344  0.3161733  5.968139
##   0.13530478  7.423460  0.3161296  5.969251
##   0.14174742  7.424985  0.3160248  5.970699
##   0.14849683  7.426959  0.3158492  5.972637
##   0.15556761  7.429349  0.3156072  5.974963
##   0.16297508  7.432231  0.3152851  5.977672
##   0.17073526  7.435705  0.3148621  5.980737
##   0.17886495  7.439760  0.3143412  5.984225
##   0.18738174  7.444172  0.3137690  5.987994
##   0.19630407  7.448938  0.3131455  5.991898
##   0.20565123  7.453776  0.3125279  5.995958
##   0.21544347  7.459149  0.3118205  6.000428
##   0.22570197  7.464824  0.3110695  6.005179
##   0.23644894  7.470910  0.3102576  6.010256
##   0.24770764  7.477218  0.3094225  6.015633
##   0.25950242  7.484087  0.3084967  6.021495
##   0.27185882  7.491245  0.3075380  6.027682
##   0.28480359  7.499005  0.3064780  6.034375
##   0.29836472  7.506891  0.3054199  6.041075
##   0.31257158  7.515320  0.3042793  6.048175
##   0.32745492  7.523868  0.3031546  6.055575
##   0.34304693  7.532648  0.3020171  6.063226
##   0.35938137  7.541089  0.3009935  6.070619
##   0.37649358  7.550056  0.2999083  6.078383
##   0.39442061  7.559168  0.2988582  6.086252
##   0.41320124  7.569174  0.2976696  6.094762
##   0.43287613  7.580123  0.2963356  6.104071
##   0.45348785  7.592047  0.2948440  6.114199
##   0.47508102  7.605041  0.2931706  6.125449
##   0.49770236  7.619047  0.2913274  6.137403
##   0.52140083  7.633565  0.2894290  6.149636
##   0.54622772  7.648274  0.2875633  6.162288
##   0.57223677  7.661823  0.2860740  6.174261
##   0.59948425  7.676152  0.2844974  6.186892
##   0.62802914  7.690515  0.2830424  6.199805
##   0.65793322  7.705672  0.2815174  6.213444
##   0.68926121  7.720915  0.2801158  6.227100
##   0.72208090  7.736995  0.2786637  6.241277
##   0.75646333  7.753360  0.2773282  6.255863
##   0.79248290  7.771268  0.2757834  6.271433
##   0.83021757  7.790855  0.2739927  6.288138
##   0.86974900  7.812295  0.2719016  6.306271
##   0.91116276  7.835760  0.2694516  6.325996
##   0.95454846  7.861433  0.2665710  6.347355
##   1.00000000  7.889514  0.2631723  6.370364
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.1123324.

##    alpha    lambda
## 53     1 0.1123324
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Test

# Evaluate the CV-tuned LASSO (filtered-train fit) on the held-out test
# set; test.model() prints a prediction summary and the test MSE.
if (algo.LASSO.caret) {
  test.model(
    model.LASSO.caret,
    data.test,
    method = "glmnet",
    subopt = "LASSO",
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   108.6   120.9   124.2   124.1   127.6   136.7 
## [1] "glmnet LASSO Test MSE: 90.9570476798876"

LARS with CV (w/ full train)

Train

# Train a Least Angle Regression model (caret method "lars") with
# cross-validation on the FULL training set; keep the fitted model for
# the Test chunk below.
if (algo.LARS.caret == TRUE) {
  set.seed(1)  # reproducible CV fold assignment
  # FIX: pass the NULL object for "no sub-option", not the string 'NULL'.
  # Every other call in this file that means "no sub-option" (e.g. the
  # matching test.model() calls for lars/leapSeq) passes NULL; the string
  # "NULL" would be treated as a real sub-option label by any labelling
  # logic inside the helper.
  # NOTE(review): train.caret.glmselect() is defined elsewhere — confirm
  # it did not special-case the literal string "NULL" before relying on
  # this.
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "lars",
    subopt = NULL,
    feature.names = feature.names
  )
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.414 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE     
##   0.00000000  10.790951        NaN  8.228019
##   0.01010101  10.669060  0.1070230  8.138536
##   0.02020202  10.560541  0.1070230  8.060271
##   0.03030303  10.465810  0.1070230  7.991861
##   0.04040404  10.386326  0.1162167  7.934314
##   0.05050505  10.312043  0.1274646  7.879487
##   0.06060606  10.246557  0.1363653  7.829608
##   0.07070707  10.185586  0.1473175  7.780281
##   0.08080808  10.127085  0.1574593  7.732619
##   0.09090909  10.072015  0.1655614  7.687281
##   0.10101010  10.020612  0.1719111  7.643737
##   0.11111111   9.973306  0.1776243  7.602056
##   0.12121212   9.928466  0.1834298  7.562547
##   0.13131313   9.886562  0.1881846  7.524680
##   0.14141414   9.847636  0.1920539  7.488610
##   0.15151515   9.811724  0.1951848  7.454165
##   0.16161616   9.778858  0.1977034  7.421141
##   0.17171717   9.749093  0.1997137  7.389570
##   0.18181818   9.723498  0.2012598  7.360894
##   0.19191919   9.701326  0.2029834  7.336397
##   0.20202020   9.680750  0.2050612  7.314649
##   0.21212121   9.662600  0.2068439  7.294773
##   0.22222222   9.645988  0.2085230  7.276515
##   0.23232323   9.631498  0.2099801  7.260246
##   0.24242424   9.619427  0.2112130  7.246357
##   0.25252525   9.608629  0.2123226  7.233749
##   0.26262626   9.599441  0.2132789  7.223253
##   0.27272727   9.591521  0.2141265  7.214094
##   0.28282828   9.585056  0.2148156  7.206355
##   0.29292929   9.580052  0.2153049  7.200305
##   0.30303030   9.575822  0.2157248  7.195157
##   0.31313131   9.572714  0.2159946  7.191166
##   0.32323232   9.570335  0.2161657  7.187688
##   0.33333333   9.568603  0.2162471  7.184920
##   0.34343434   9.567291  0.2162808  7.182786
##   0.35353535   9.566363  0.2162641  7.181173
##   0.36363636   9.565693  0.2162224  7.179890
##   0.37373737   9.565134  0.2161761  7.178877
##   0.38383838   9.564881  0.2160882  7.178193
##   0.39393939   9.564542  0.2160292  7.177578
##   0.40404040   9.564349  0.2159567  7.177158
##   0.41414141   9.564308  0.2158683  7.176981
##   0.42424242   9.564362  0.2157729  7.176874
##   0.43434343   9.564487  0.2156753  7.176826
##   0.44444444   9.564859  0.2155427  7.176949
##   0.45454545   9.565384  0.2153894  7.177193
##   0.46464646   9.566064  0.2152178  7.177648
##   0.47474747   9.566832  0.2150378  7.178126
##   0.48484848   9.567861  0.2148213  7.178823
##   0.49494949   9.569020  0.2145895  7.179661
##   0.50505051   9.570252  0.2143518  7.180651
##   0.51515152   9.571568  0.2141081  7.181773
##   0.52525253   9.573034  0.2138456  7.183047
##   0.53535354   9.574594  0.2135740  7.184359
##   0.54545455   9.576245  0.2132935  7.185736
##   0.55555556   9.578021  0.2129975  7.187239
##   0.56565657   9.580040  0.2126663  7.188990
##   0.57575758   9.582209  0.2123155  7.190829
##   0.58585859   9.584503  0.2119495  7.192696
##   0.59595960   9.586966  0.2115619  7.194679
##   0.60606061   9.589419  0.2111816  7.196594
##   0.61616162   9.591955  0.2107939  7.198545
##   0.62626263   9.594559  0.2103995  7.200486
##   0.63636364   9.597217  0.2100002  7.202393
##   0.64646465   9.599855  0.2096089  7.204257
##   0.65656566   9.602485  0.2092233  7.206128
##   0.66666667   9.605158  0.2088353  7.208045
##   0.67676768   9.607850  0.2084492  7.209951
##   0.68686869   9.610514  0.2080724  7.211827
##   0.69696970   9.613190  0.2076977  7.213760
##   0.70707071   9.615976  0.2073099  7.215815
##   0.71717172   9.618831  0.2069156  7.217918
##   0.72727273   9.621711  0.2065213  7.220141
##   0.73737374   9.624653  0.2061209  7.222391
##   0.74747475   9.627606  0.2057218  7.224680
##   0.75757576   9.630587  0.2053222  7.227021
##   0.76767677   9.633590  0.2049235  7.229393
##   0.77777778   9.636651  0.2045198  7.231798
##   0.78787879   9.639791  0.2041080  7.234342
##   0.79797980   9.642950  0.2036973  7.236989
##   0.80808081   9.646134  0.2032879  7.239657
##   0.81818182   9.649347  0.2028776  7.242390
##   0.82828283   9.652601  0.2024652  7.245197
##   0.83838384   9.655887  0.2020513  7.248079
##   0.84848485   9.659210  0.2016352  7.251041
##   0.85858586   9.662568  0.2012179  7.254020
##   0.86868687   9.665983  0.2007963  7.257001
##   0.87878788   9.669488  0.2003657  7.260014
##   0.88888889   9.673129  0.1999189  7.263088
##   0.89898990   9.676840  0.1994659  7.266201
##   0.90909091   9.680535  0.1990190  7.269314
##   0.91919192   9.684212  0.1985789  7.272425
##   0.92929293   9.687921  0.1981387  7.275613
##   0.93939394   9.691633  0.1977021  7.278757
##   0.94949495   9.695338  0.1972702  7.281892
##   0.95959596   9.699087  0.1968357  7.285056
##   0.96969697   9.702870  0.1964002  7.288215
##   0.97979798   9.706697  0.1959624  7.291412
##   0.98989899   9.710592  0.1955188  7.294606
##   1.00000000   9.714503  0.1950768  7.297804
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.4141414.

##     fraction
## 42 0.4141414
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Test

# Evaluate the CV-tuned LARS model (full-train fit) on the held-out test
# set; test.model() prints a prediction summary and the test MSE.
if (algo.LARS.caret) {
  test.model(
    model.LARS.caret,
    data.test,
    method = "lars",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   111.0   122.2   125.5   125.3   128.8   138.3 
## [1] "lars  Test MSE: 89.9672610672758"

LARS with CV (w/ filtered train)

Train

# Train a Least Angle Regression model (caret method "lars") with
# cross-validation on the FILTERED training set (data.train2); keep the
# fitted model for the Test chunk below.
if (algo.LARS.caret == TRUE) {
  set.seed(1)  # reproducible CV fold assignment
  # FIX: pass the NULL object for "no sub-option", not the string 'NULL'.
  # Every other call in this file that means "no sub-option" (e.g. the
  # matching test.model() calls for lars/leapSeq) passes NULL; the string
  # "NULL" would be treated as a real sub-option label by any labelling
  # logic inside the helper.
  # NOTE(review): train.caret.glmselect() is defined elsewhere — confirm
  # it did not special-case the literal string "NULL" before relying on
  # this.
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "lars",
    subopt = NULL,
    feature.names = feature.names
  )
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.485 on full training set
## Least Angle Regression 
## 
## 5712 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5140, 5140, 5141, 5142, 5141, 5142, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE      Rsquared   MAE     
##   0.00000000  8.965132        NaN  7.151413
##   0.01010101  8.827399  0.1500471  7.053747
##   0.02020202  8.703557  0.1500471  6.967931
##   0.03030303  8.594207  0.1500471  6.891913
##   0.04040404  8.499253  0.1677379  6.825897
##   0.05050505  8.410441  0.1840918  6.765352
##   0.06060606  8.329136  0.1941970  6.709788
##   0.07070707  8.257852  0.2032007  6.660182
##   0.08080808  8.189768  0.2172935  6.609997
##   0.09090909  8.124849  0.2291276  6.560249
##   0.10101010  8.063873  0.2385402  6.511710
##   0.11111111  8.007761  0.2462789  6.466284
##   0.12121212  7.955037  0.2541572  6.423746
##   0.13131313  7.904846  0.2611683  6.383169
##   0.14141414  7.857920  0.2669333  6.344746
##   0.15151515  7.814319  0.2716456  6.308263
##   0.16161616  7.774099  0.2754747  6.274138
##   0.17171717  7.737422  0.2785792  6.242105
##   0.18181818  7.704929  0.2814914  6.213154
##   0.19191919  7.675310  0.2845708  6.186705
##   0.20202020  7.649490  0.2873692  6.164155
##   0.21212121  7.624930  0.2905413  6.143189
##   0.22222222  7.601360  0.2936384  6.123110
##   0.23232323  7.579593  0.2963706  6.104337
##   0.24242424  7.559883  0.2987312  6.087481
##   0.25252525  7.543125  0.3006710  6.073079
##   0.26262626  7.528196  0.3025591  6.060016
##   0.27272727  7.514239  0.3044106  6.047532
##   0.28282828  7.501918  0.3060651  6.037118
##   0.29292929  7.491020  0.3075476  6.027846
##   0.30303030  7.481449  0.3088360  6.019466
##   0.31313131  7.472751  0.3100092  6.011973
##   0.32323232  7.465200  0.3110163  6.005546
##   0.33333333  7.458341  0.3119253  5.999746
##   0.34343434  7.452279  0.3127138  5.994673
##   0.35353535  7.447045  0.3133879  5.990417
##   0.36363636  7.442159  0.3140237  5.986342
##   0.37373737  7.437813  0.3145819  5.982614
##   0.38383838  7.434105  0.3150420  5.979363
##   0.39393939  7.430926  0.3154256  5.976552
##   0.40404040  7.428485  0.3156833  5.974292
##   0.41414141  7.426361  0.3158976  5.972258
##   0.42424242  7.424590  0.3160577  5.970532
##   0.43434343  7.423274  0.3161473  5.969246
##   0.44444444  7.422246  0.3162008  5.968307
##   0.45454545  7.421503  0.3162121  5.967551
##   0.46464646  7.420982  0.3161934  5.967013
##   0.47474747  7.420822  0.3161152  5.966777
##   0.48484848  7.420724  0.3160340  5.966641
##   0.49494949  7.420895  0.3159102  5.966663
##   0.50505051  7.421113  0.3157855  5.966712
##   0.51515152  7.421497  0.3156383  5.966959
##   0.52525253  7.422013  0.3154732  5.967298
##   0.53535354  7.422586  0.3153051  5.967773
##   0.54545455  7.423301  0.3151182  5.968262
##   0.55555556  7.424173  0.3149097  5.968806
##   0.56565657  7.425156  0.3146853  5.969427
##   0.57575758  7.426238  0.3144477  5.970158
##   0.58585859  7.427403  0.3142008  5.970918
##   0.59595960  7.428630  0.3139469  5.971676
##   0.60606061  7.429975  0.3136749  5.972534
##   0.61616162  7.431382  0.3133963  5.973418
##   0.62626263  7.432867  0.3131082  5.974276
##   0.63636364  7.434408  0.3128147  5.975087
##   0.64646465  7.435956  0.3125247  5.975931
##   0.65656566  7.437512  0.3122389  5.976818
##   0.66666667  7.439067  0.3119570  5.977755
##   0.67676768  7.440734  0.3116591  5.978746
##   0.68686869  7.442460  0.3113550  5.979764
##   0.69696970  7.444286  0.3110378  5.980874
##   0.70707071  7.446209  0.3107081  5.982149
##   0.71717172  7.448159  0.3103782  5.983462
##   0.72727273  7.450196  0.3100363  5.984888
##   0.73737374  7.452278  0.3096915  5.986389
##   0.74747475  7.454392  0.3093449  5.987909
##   0.75757576  7.456512  0.3090009  5.989470
##   0.76767677  7.458642  0.3086582  5.991039
##   0.77777778  7.460808  0.3083119  5.992621
##   0.78787879  7.463023  0.3079598  5.994216
##   0.79797980  7.465269  0.3076060  5.995842
##   0.80808081  7.467559  0.3072485  5.997547
##   0.81818182  7.469820  0.3068999  5.999205
##   0.82828283  7.472083  0.3065548  6.000840
##   0.83838384  7.474333  0.3062160  6.002420
##   0.84848485  7.476582  0.3058819  6.003989
##   0.85858586  7.478831  0.3055518  6.005535
##   0.86868687  7.481077  0.3052262  6.007042
##   0.87878788  7.483320  0.3049054  6.008555
##   0.88888889  7.485632  0.3045765  6.010139
##   0.89898990  7.487956  0.3042490  6.011779
##   0.90909091  7.490310  0.3039200  6.013489
##   0.91919192  7.492682  0.3035915  6.015187
##   0.92929293  7.495099  0.3032591  6.016865
##   0.93939394  7.497567  0.3029220  6.018580
##   0.94949495  7.500104  0.3025771  6.020343
##   0.95959596  7.502697  0.3022261  6.022139
##   0.96969697  7.505342  0.3018694  6.023996
##   0.97979798  7.508020  0.3015108  6.025913
##   0.98989899  7.510757  0.3011470  6.027837
##   1.00000000  7.513584  0.3007718  6.029796
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.4848485.

##     fraction
## 49 0.4848485
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Test

# Evaluate the CV-tuned LARS model (filtered-train fit) on the held-out
# test set; test.model() prints a prediction summary and the test MSE.
if (algo.LARS.caret) {
  test.model(
    model.LARS.caret,
    data.test,
    method = "lars",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   108.6   120.9   124.2   124.1   127.6   136.7 
## [1] "lars  Test MSE: 90.9710422668425"